From 625408b80b099c75bb633c9cf5df648fbce8503b Mon Sep 17 00:00:00 2001
From: Hittherhod
Date: Wed, 11 Oct 2023 12:02:30 -0400
Subject: [PATCH] v2023-08-01 WorkspaceRP Swagger Spec Updated with Latest Changes (#32325)

* Regenerated from workspaceRP.json

* Regenerated REST client from updated workspaceRP.json

* Regenerated REST client from updated workspaceRP.json

* Fixed updated naming in _ml_client import

* Fixed naming in _feature_set_operations

---------

Co-authored-by: Sam Rhody
---
 sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py | 4 +-
 .../v2023_08_01_preview/__init__.py | 20 +-
 ...py => _azure_machine_learning_services.py} | 234 +-
 .../v2023_08_01_preview/_configuration.py | 60 +-
 .../_restclient/v2023_08_01_preview/_patch.py | 2 +-
 .../v2023_08_01_preview/_serialization.py | 1992 +
 .../v2023_08_01_preview/_vendor.py | 13 +-
 .../v2023_08_01_preview/aio/__init__.py | 20 +-
 ...py => _azure_machine_learning_services.py} | 213 +-
 .../v2023_08_01_preview/aio/_configuration.py | 56 +-
 .../v2023_08_01_preview/aio/_patch.py | 2 +-
 .../aio/operations/__init__.py | 149 +-
 .../_batch_deployments_operations.py | 758 +-
 .../operations/_batch_endpoints_operations.py | 784 +-
 .../operations/_code_containers_operations.py | 372 +-
 .../operations/_code_versions_operations.py | 535 +-
 .../_component_containers_operations.py | 372 +-
 .../_component_versions_operations.py | 398 +-
 .../aio/operations/_compute_operations.py | 1657 +-
 .../operations/_data_containers_operations.py | 372 +-
 .../operations/_data_versions_operations.py | 402 +-
 .../aio/operations/_datastores_operations.py | 454 +-
 .../_environment_containers_operations.py | 373 +-
 .../_environment_versions_operations.py | 398 +-
 .../aio/operations/_features_operations.py | 194 +-
 .../_featureset_containers_operations.py | 538 +-
 .../_featureset_versions_operations.py | 779 +-
 ...aturestore_entity_containers_operations.py | 539 +-
 ...featurestore_entity_versions_operations.py | 563 +-
 .../aio/operations/_jobs_operations.py | 696 +-
 .../operations/_labeling_jobs_operations.py | 833 +-
 .../_managed_network_provisions_operations.py | 252 +-
 ...anaged_network_settings_rule_operations.py | 498 +-
 .../_model_containers_operations.py | 375 +-
 .../operations/_model_versions_operations.py | 633 +-
 .../_online_deployments_operations.py | 986 +-
 .../_online_endpoints_operations.py | 1062 +-
 .../aio/operations/_operations.py | 107 +-
 .../aio/operations/_patch.py | 20 +
 ...private_endpoint_connections_operations.py | 365 +-
 .../_private_link_resources_operations.py | 117 +-
 .../aio/operations/_quotas_operations.py | 218 +-
 .../aio/operations/_registries_operations.py | 884 +-
 .../_registry_code_containers_operations.py | 519 +-
 .../_registry_code_versions_operations.py | 680 +-
 ...egistry_component_containers_operations.py | 519 +-
 ..._registry_component_versions_operations.py | 540 +-
 .../_registry_data_containers_operations.py | 519 +-
 .../_registry_data_versions_operations.py | 690 +-
 ...istry_environment_containers_operations.py | 520 +-
 ...egistry_environment_versions_operations.py | 546 +-
 .../_registry_model_containers_operations.py | 519 +-
 .../_registry_model_versions_operations.py | 916 +-
 .../aio/operations/_schedules_operations.py | 520 +-
 .../_serverless_endpoints_operations.py | 997 +-
 .../aio/operations/_usages_operations.py | 113 +-
 .../_virtual_machine_sizes_operations.py | 87 +-
 .../_workspace_connections_operations.py | 804 +-
 .../_workspace_features_operations.py | 117 +-
 .../aio/operations/_workspaces_operations.py | 1456 +-
 .../v2023_08_01_preview/models/__init__.py | 3283 +-
 ..._azure_machine_learning_services_enums.py | 2060 +
 ...azure_machine_learning_workspaces_enums.py | 1955 -
 .../v2023_08_01_preview/models/_models.py | 30965 ----------------
 .../v2023_08_01_preview/models/_models_py3.py | 21601 ++++++-----
 .../v2023_08_01_preview/models/_patch.py | 20 +
 .../operations/__init__.py | 149 +-
 .../_batch_deployments_operations.py | 1181 +-
 .../operations/_batch_endpoints_operations.py | 1199 +-
 .../operations/_code_containers_operations.py | 610 +-
 .../operations/_code_versions_operations.py | 895 +-
 .../_component_containers_operations.py | 626 +-
 .../_component_versions_operations.py | 687 +-
 .../operations/_compute_operations.py | 2578 +-
 .../operations/_data_containers_operations.py | 627 +-
 .../operations/_data_versions_operations.py | 697 +-
 .../operations/_datastores_operations.py | 797 +-
 .../_environment_containers_operations.py | 627 +-
 .../_environment_versions_operations.py | 687 +-
 .../operations/_features_operations.py | 404 +-
 .../_featureset_containers_operations.py | 845 +-
 .../_featureset_versions_operations.py | 1206 +-
 ...aturestore_entity_containers_operations.py | 846 +-
 ...featurestore_entity_versions_operations.py | 903 +-
 .../operations/_jobs_operations.py | 1121 +-
 .../operations/_labeling_jobs_operations.py | 1293 +-
 .../_managed_network_provisions_operations.py | 342 +-
 ...anaged_network_settings_rule_operations.py | 760 +-
 .../_model_containers_operations.py | 636 +-
 .../operations/_model_versions_operations.py | 1046 +-
 .../_online_deployments_operations.py | 1565 +-
 .../_online_endpoints_operations.py | 1640 +-
 .../operations/_operations.py | 157 +-
 .../v2023_08_01_preview/operations/_patch.py | 20 +
 ...private_endpoint_connections_operations.py | 617 +-
 .../_private_link_resources_operations.py | 190 +-
 .../operations/_quotas_operations.py | 341 +-
 .../operations/_registries_operations.py | 1289 +-
 .../_registry_code_containers_operations.py | 782 +-
 .../_registry_code_versions_operations.py | 1055 +-
 ...egistry_component_containers_operations.py | 789 +-
 ..._registry_component_versions_operations.py | 862 +-
 .../_registry_data_containers_operations.py | 797 +-
 .../_registry_data_versions_operations.py | 1076 +-
 ...istry_environment_containers_operations.py | 807 +-
 ...egistry_environment_versions_operations.py | 887 +-
 .../_registry_model_containers_operations.py | 801 +-
 .../_registry_model_versions_operations.py | 1406 +-
 .../operations/_schedules_operations.py | 790 +-
 .../_serverless_endpoints_operations.py | 1470 +-
 .../operations/_usages_operations.py | 178 +-
 .../_virtual_machine_sizes_operations.py | 151 +-
 .../_workspace_connections_operations.py | 1256 +-
 .../_workspace_features_operations.py | 191 +-
 .../operations/_workspaces_operations.py | 2251 +-
 .../ml/operations/_feature_set_operations.py | 2 +-
 .../2023-08-01-preview/workspaceRP.json | 358 +-
 117 files changed, 59739 insertions(+), 71996 deletions(-)
 rename sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/{_azure_machine_learning_workspaces.py => _azure_machine_learning_services.py} (69%)
 create mode 100644 sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_serialization.py
 rename sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/{_azure_machine_learning_workspaces.py => _azure_machine_learning_services.py} (71%)
 create mode 100644 sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_patch.py
 create mode 100644 sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_azure_machine_learning_services_enums.py
 delete mode 100644 sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_azure_machine_learning_workspaces_enums.py
 delete mode 100644 sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_models.py
 create mode 100644 sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_patch.py
 create mode 100644 sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_patch.py
diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py
index 5d4e692cd98f..2f4017d8c7cd 100644
--- a/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py
+++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_ml_client.py
@@ -32,7 +32,9 @@
 from azure.ai.ml._restclient.v2023_04_01 import AzureMachineLearningWorkspaces as ServiceClient042023
 from azure.ai.ml._restclient.v2023_04_01_preview import AzureMachineLearningWorkspaces as ServiceClient042023Preview
 from azure.ai.ml._restclient.v2023_06_01_preview import AzureMachineLearningWorkspaces as ServiceClient062023Preview
-from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningWorkspaces as ServiceClient082023Preview
+
+# Same object, but was renamed starting in v2023_08_01_preview
+from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices as ServiceClient082023Preview
 from azure.ai.ml._restclient.v2023_10_01 import AzureMachineLearningWorkspaces as ServiceClient102023
 from azure.ai.ml._scope_dependent_operations import OperationConfig, OperationsContainer, OperationScope
 from azure.ai.ml._telemetry.logging_handler import get_appinsights_log_handler
diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/__init__.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/__init__.py
index da46614477a9..e9158a84f42d 100644
--- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/__init__.py
+++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/__init__.py
@@ -6,13 +6,21 @@
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
# -------------------------------------------------------------------------- -from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces +from ._azure_machine_learning_services import AzureMachineLearningServices from ._version import VERSION __version__ = VERSION -__all__ = ['AzureMachineLearningWorkspaces'] -# `._patch.py` is used for handwritten extensions to the generated code -# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md -from ._patch import patch_sdk -patch_sdk() +try: + from ._patch import * # pylint: disable=unused-wildcard-import + from ._patch import __all__ as _patch_all +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "AzureMachineLearningServices", +] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_azure_machine_learning_workspaces.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_azure_machine_learning_services.py similarity index 69% rename from sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_azure_machine_learning_workspaces.py rename to sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_azure_machine_learning_services.py index 3c9b9eadda23..36da4b09e060 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_azure_machine_learning_workspaces.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_azure_machine_learning_services.py @@ -7,24 +7,70 @@ # -------------------------------------------------------------------------- from copy import deepcopy -from typing import TYPE_CHECKING - -from msrest import Deserializer, Serializer +from typing import TYPE_CHECKING, Any +from azure.core.rest import HttpRequest, HttpResponse from azure.mgmt.core import ARMPipelineClient -from . import models -from ._configuration import AzureMachineLearningWorkspacesConfiguration -from .operations import BatchDeploymentsOperations, BatchEndpointsOperations, CodeContainersOperations, CodeVersionsOperations, ComponentContainersOperations, ComponentVersionsOperations, ComputeOperations, DataContainersOperations, DataVersionsOperations, DatastoresOperations, EnvironmentContainersOperations, EnvironmentVersionsOperations, FeaturesOperations, FeaturesetContainersOperations, FeaturesetVersionsOperations, FeaturestoreEntityContainersOperations, FeaturestoreEntityVersionsOperations, JobsOperations, LabelingJobsOperations, ManagedNetworkProvisionsOperations, ManagedNetworkSettingsRuleOperations, ModelContainersOperations, ModelVersionsOperations, OnlineDeploymentsOperations, OnlineEndpointsOperations, Operations, PrivateEndpointConnectionsOperations, PrivateLinkResourcesOperations, QuotasOperations, RegistriesOperations, RegistryCodeContainersOperations, RegistryCodeVersionsOperations, RegistryComponentContainersOperations, RegistryComponentVersionsOperations, RegistryDataContainersOperations, RegistryDataVersionsOperations, RegistryEnvironmentContainersOperations, RegistryEnvironmentVersionsOperations, RegistryModelContainersOperations, RegistryModelVersionsOperations, SchedulesOperations, ServerlessEndpointsOperations, UsagesOperations, VirtualMachineSizesOperations, WorkspaceConnectionsOperations, WorkspaceFeaturesOperations, WorkspacesOperations +from . 
import models as _models +from ._configuration import AzureMachineLearningServicesConfiguration +from ._serialization import Deserializer, Serializer +from .operations import ( + BatchDeploymentsOperations, + BatchEndpointsOperations, + CodeContainersOperations, + CodeVersionsOperations, + ComponentContainersOperations, + ComponentVersionsOperations, + ComputeOperations, + DataContainersOperations, + DatastoresOperations, + DataVersionsOperations, + EnvironmentContainersOperations, + EnvironmentVersionsOperations, + FeaturesetContainersOperations, + FeaturesetVersionsOperations, + FeaturesOperations, + FeaturestoreEntityContainersOperations, + FeaturestoreEntityVersionsOperations, + JobsOperations, + LabelingJobsOperations, + ManagedNetworkProvisionsOperations, + ManagedNetworkSettingsRuleOperations, + ModelContainersOperations, + ModelVersionsOperations, + OnlineDeploymentsOperations, + OnlineEndpointsOperations, + Operations, + PrivateEndpointConnectionsOperations, + PrivateLinkResourcesOperations, + QuotasOperations, + RegistriesOperations, + RegistryCodeContainersOperations, + RegistryCodeVersionsOperations, + RegistryComponentContainersOperations, + RegistryComponentVersionsOperations, + RegistryDataContainersOperations, + RegistryDataVersionsOperations, + RegistryEnvironmentContainersOperations, + RegistryEnvironmentVersionsOperations, + RegistryModelContainersOperations, + RegistryModelVersionsOperations, + SchedulesOperations, + ServerlessEndpointsOperations, + UsagesOperations, + VirtualMachineSizesOperations, + WorkspaceConnectionsOperations, + WorkspaceFeaturesOperations, + WorkspacesOperations, +) if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any - from azure.core.credentials import TokenCredential - from azure.core.rest import HttpRequest, HttpResponse -class AzureMachineLearningWorkspaces(object): # pylint: disable=too-many-instance-attributes + +class AzureMachineLearningServices: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes """These APIs allow end users to operate on Azure Machine Learning Workspace resources. :ivar usages: UsagesOperations operations @@ -154,14 +200,14 @@ class AzureMachineLearningWorkspaces(object): # pylint: disable=too-many-inst :ivar managed_network_provisions: ManagedNetworkProvisionsOperations operations :vartype managed_network_provisions: azure.mgmt.machinelearningservices.operations.ManagedNetworkProvisionsOperations - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The ID of the target subscription. + :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :param base_url: Service URL. Default value is 'https://management.azure.com'. + :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: Api Version. The default value is "2023-08-01-preview". Note that - overriding this default value may result in unsupported behavior. + :keyword api_version: Api Version. Default value is "2023-08-01-preview". Note that overriding + this default value may result in unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
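
Reviewer note on the rename above: only the class and configuration names change in this version (AzureMachineLearningWorkspaces becomes AzureMachineLearningServices, with AzureMachineLearningServicesConfiguration alongside it); the constructor keeps the credential / subscription_id / base_url / api_version surface documented in the docstring just shown, and _ml_client.py simply swaps the alias. A minimal usage sketch against the renamed client, assuming azure-identity's DefaultAzureCredential and a placeholder subscription ID (both illustrative, not part of this patch):

    # Illustrative sketch only -- not part of the generated code in this patch.
    # Assumes the azure-identity package is installed and that a real
    # subscription ID replaces the zero GUID below.
    from azure.identity import DefaultAzureCredential

    from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices

    client = AzureMachineLearningServices(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",
    )

    # Operation groups are unchanged by the rename, e.g. client.workspaces,
    # client.jobs, client.online_endpoints, client.registries, ...
    with client:
        pass  # call operation-group methods here

Code that imported AzureMachineLearningWorkspaces from v2023_08_01_preview before this change needs the same alias swap that _ml_client.py makes above; older API versions keep the old name.
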
@@ -169,74 +215,129 @@ class AzureMachineLearningWorkspaces(object): # pylint: disable=too-many-inst def __init__( self, - credential, # type: "TokenCredential" - subscription_id, # type: str - base_url="https://management.azure.com", # type: str - **kwargs # type: Any - ): - # type: (...) -> None - self._config = AzureMachineLearningWorkspacesConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) - self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + credential: "TokenCredential", + subscription_id: str, + base_url: str = "https://management.azure.com", + **kwargs: Any + ) -> None: + self._config = AzureMachineLearningServicesConfiguration( + credential=credential, subscription_id=subscription_id, **kwargs + ) + self._client: ARMPipelineClient = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) - self.virtual_machine_sizes = VirtualMachineSizesOperations(self._client, self._config, self._serialize, self._deserialize) + self.virtual_machine_sizes = VirtualMachineSizesOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.quotas = QuotasOperations(self._client, self._config, self._serialize, self._deserialize) self.compute = ComputeOperations(self._client, self._config, self._serialize, self._deserialize) self.registries = RegistriesOperations(self._client, self._config, self._serialize, self._deserialize) - self.workspace_features = WorkspaceFeaturesOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_code_containers = RegistryCodeContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_code_versions = RegistryCodeVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_component_containers = RegistryComponentContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_component_versions = RegistryComponentVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_data_containers = RegistryDataContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_data_versions = RegistryDataVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_environment_containers = RegistryEnvironmentContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_environment_versions = RegistryEnvironmentVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_model_containers = RegistryModelContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_model_versions = RegistryModelVersionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.workspace_features = WorkspaceFeaturesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_code_containers = RegistryCodeContainersOperations( + self._client, self._config, self._serialize, 
self._deserialize + ) + self.registry_code_versions = RegistryCodeVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_component_containers = RegistryComponentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_component_versions = RegistryComponentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_data_containers = RegistryDataContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_data_versions = RegistryDataVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_environment_containers = RegistryEnvironmentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_environment_versions = RegistryEnvironmentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_model_containers = RegistryModelContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_model_versions = RegistryModelVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.batch_endpoints = BatchEndpointsOperations(self._client, self._config, self._serialize, self._deserialize) - self.batch_deployments = BatchDeploymentsOperations(self._client, self._config, self._serialize, self._deserialize) + self.batch_deployments = BatchDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.code_containers = CodeContainersOperations(self._client, self._config, self._serialize, self._deserialize) self.code_versions = CodeVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.component_containers = ComponentContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.component_versions = ComponentVersionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.component_containers = ComponentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.component_versions = ComponentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.data_containers = DataContainersOperations(self._client, self._config, self._serialize, self._deserialize) self.data_versions = DataVersionsOperations(self._client, self._config, self._serialize, self._deserialize) self.datastores = DatastoresOperations(self._client, self._config, self._serialize, self._deserialize) - self.environment_containers = EnvironmentContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.environment_versions = EnvironmentVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.featureset_containers = FeaturesetContainersOperations(self._client, self._config, self._serialize, self._deserialize) + self.environment_containers = EnvironmentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.environment_versions = EnvironmentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.featureset_containers = FeaturesetContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.features = FeaturesOperations(self._client, self._config, self._serialize, 
self._deserialize) - self.featureset_versions = FeaturesetVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.featurestore_entity_containers = FeaturestoreEntityContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.featurestore_entity_versions = FeaturestoreEntityVersionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.featureset_versions = FeaturesetVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.featurestore_entity_containers = FeaturestoreEntityContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.featurestore_entity_versions = FeaturestoreEntityVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.jobs = JobsOperations(self._client, self._config, self._serialize, self._deserialize) self.labeling_jobs = LabelingJobsOperations(self._client, self._config, self._serialize, self._deserialize) - self.model_containers = ModelContainersOperations(self._client, self._config, self._serialize, self._deserialize) + self.model_containers = ModelContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.model_versions = ModelVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.online_endpoints = OnlineEndpointsOperations(self._client, self._config, self._serialize, self._deserialize) - self.online_deployments = OnlineDeploymentsOperations(self._client, self._config, self._serialize, self._deserialize) + self.online_endpoints = OnlineEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.online_deployments = OnlineDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.schedules = SchedulesOperations(self._client, self._config, self._serialize, self._deserialize) - self.serverless_endpoints = ServerlessEndpointsOperations(self._client, self._config, self._serialize, self._deserialize) + self.serverless_endpoints = ServerlessEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) self.workspaces = WorkspacesOperations(self._client, self._config, self._serialize, self._deserialize) - self.workspace_connections = WorkspaceConnectionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.managed_network_settings_rule = ManagedNetworkSettingsRuleOperations(self._client, self._config, self._serialize, self._deserialize) - self.private_endpoint_connections = PrivateEndpointConnectionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.private_link_resources = PrivateLinkResourcesOperations(self._client, self._config, self._serialize, self._deserialize) - self.managed_network_provisions = ManagedNetworkProvisionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.workspace_connections = WorkspaceConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.managed_network_settings_rule = ManagedNetworkSettingsRuleOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.private_link_resources = 
PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.managed_network_provisions = ManagedNetworkProvisionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) - - def _send_request( - self, - request, # type: HttpRequest - **kwargs # type: Any - ): - # type: (...) -> HttpResponse + def _send_request(self, request: HttpRequest, **kwargs: Any) -> HttpResponse: """Runs the network request through the client's chained policies. >>> from azure.core.rest import HttpRequest @@ -245,7 +346,7 @@ def _send_request( >>> response = client._send_request(request) - For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request :param request: The network request you want to make. Required. :type request: ~azure.core.rest.HttpRequest @@ -258,15 +359,12 @@ def _send_request( request_copy.url = self._client.format_url(request_copy.url) return self._client.send_request(request_copy, **kwargs) - def close(self): - # type: () -> None + def close(self) -> None: self._client.close() - def __enter__(self): - # type: () -> AzureMachineLearningWorkspaces + def __enter__(self) -> "AzureMachineLearningServices": self._client.__enter__() return self - def __exit__(self, *exc_details): - # type: (Any) -> None + def __exit__(self, *exc_details: Any) -> None: self._client.__exit__(*exc_details) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_configuration.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_configuration.py index c5e20f8e1edd..ec306c9310ea 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_configuration.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_configuration.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from azure.core.configuration import Configuration from azure.core.pipeline import policies @@ -16,35 +16,27 @@ if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports - from typing import Any - from azure.core.credentials import TokenCredential -class AzureMachineLearningWorkspacesConfiguration(Configuration): # pylint: disable=too-many-instance-attributes - """Configuration for AzureMachineLearningWorkspaces. +class AzureMachineLearningServicesConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for AzureMachineLearningServices. Note that all parameters used to create this instance are saved as instance attributes. - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials.TokenCredential - :param subscription_id: The ID of the target subscription. + :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :keyword api_version: Api Version. The default value is "2023-08-01-preview". Note that - overriding this default value may result in unsupported behavior. + :keyword api_version: Api Version. Default value is "2023-08-01-preview". Note that overriding + this default value may result in unsupported behavior. 
:paramtype api_version: str """ - def __init__( - self, - credential, # type: "TokenCredential" - subscription_id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + def __init__(self, credential: "TokenCredential", subscription_id: str, **kwargs: Any) -> None: + super(AzureMachineLearningServicesConfiguration, self).__init__(**kwargs) + api_version: str = kwargs.pop("api_version", "2023-08-01-preview") if credential is None: raise ValueError("Parameter 'credential' must not be None.") @@ -54,23 +46,21 @@ def __init__( self.credential = credential self.subscription_id = subscription_id self.api_version = api_version - self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'mgmt-machinelearningservices/{}'.format(VERSION)) + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-machinelearningservices/{}".format(VERSION)) self._configure(**kwargs) - def _configure( - self, - **kwargs # type: Any - ): - # type: (...) -> None - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.RetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.RedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: - self.authentication_policy = ARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) + self.authentication_policy = ARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_patch.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_patch.py index 74e48ecd07cf..f99e77fef986 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_patch.py +++ 
b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_patch.py @@ -28,4 +28,4 @@ # This file is used for handwritten extensions to the generated code. Example: # https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md def patch_sdk(): - pass \ No newline at end of file + pass diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_serialization.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_serialization.py new file mode 100644 index 000000000000..8d266f90cc35 --- /dev/null +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_serialization.py @@ -0,0 +1,1992 @@ +# -------------------------------------------------------------------------- +# +# Copyright (c) Microsoft Corporation. All rights reserved. +# +# The MIT License (MIT) +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the ""Software""), to +# deal in the Software without restriction, including without limitation the +# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +# sell copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in +# all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +# IN THE SOFTWARE. +# +# -------------------------------------------------------------------------- + +# pylint: skip-file +# pyright: reportUnnecessaryTypeIgnoreComment=false + +import calendar +import codecs +import datetime +import decimal +import email +import json +import logging +import re +import sys +from base64 import b64decode, b64encode +from enum import Enum +from typing import IO, Any, AnyStr, Callable, Dict, List, Mapping, MutableMapping, Optional, Type, TypeVar, Union, cast + +try: + from urllib import quote # type: ignore +except ImportError: + from urllib.parse import quote + +import xml.etree.ElementTree as ET + +import isodate # type: ignore + +from azure.core.exceptions import DeserializationError, SerializationError, raise_with_traceback +from azure.core.serialization import NULL as AzureCoreNull + +_BOM = codecs.BOM_UTF8.decode(encoding="utf-8") + +ModelType = TypeVar("ModelType", bound="Model") +JSON = MutableMapping[str, Any] + + +class RawDeserializer: + + # Accept "text" because we're open minded people... + JSON_REGEXP = re.compile(r"^(application|text)/([a-z+.]+\+)?json$") + + # Name used in context + CONTEXT_NAME = "deserialized_data" + + @classmethod + def deserialize_from_text(cls, data: Optional[Union[AnyStr, IO]], content_type: Optional[str] = None) -> Any: + """Decode data according to content-type. + + Accept a stream of data as well, but will be load at once in memory for now. 
+ + If no content-type, will return the string version (not bytes, not stream) + + :param data: Input, could be bytes or stream (will be decoded with UTF8) or text + :type data: str or bytes or IO + :param str content_type: The content type. + """ + if hasattr(data, "read"): + # Assume a stream + data = cast(IO, data).read() + + if isinstance(data, bytes): + data_as_str = data.decode(encoding="utf-8-sig") + else: + # Explain to mypy the correct type. + data_as_str = cast(str, data) + + # Remove Byte Order Mark if present in string + data_as_str = data_as_str.lstrip(_BOM) + + if content_type is None: + return data + + if cls.JSON_REGEXP.match(content_type): + try: + return json.loads(data_as_str) + except ValueError as err: + raise DeserializationError("JSON is invalid: {}".format(err), err) + elif "xml" in (content_type or []): + try: + + try: + if isinstance(data, unicode): # type: ignore + # If I'm Python 2.7 and unicode XML will scream if I try a "fromstring" on unicode string + data_as_str = data_as_str.encode(encoding="utf-8") # type: ignore + except NameError: + pass + + return ET.fromstring(data_as_str) # nosec + except ET.ParseError: + # It might be because the server has an issue, and returned JSON with + # content-type XML.... + # So let's try a JSON load, and if it's still broken + # let's flow the initial exception + def _json_attemp(data): + try: + return True, json.loads(data) + except ValueError: + return False, None # Don't care about this one + + success, json_result = _json_attemp(data) + if success: + return json_result + # If i'm here, it's not JSON, it's not XML, let's scream + # and raise the last context in this block (the XML exception) + # The function hack is because Py2.7 messes up with exception + # context otherwise. + _LOGGER.critical("Wasn't XML not JSON, failing") + raise_with_traceback(DeserializationError, "XML is invalid") + raise DeserializationError("Cannot deserialize content-type: {}".format(content_type)) + + @classmethod + def deserialize_from_http_generics(cls, body_bytes: Optional[Union[AnyStr, IO]], headers: Mapping) -> Any: + """Deserialize from HTTP response. + + Use bytes and headers to NOT use any requests/aiohttp or whatever + specific implementation. + Headers will tested for "content-type" + """ + # Try to use content-type from headers if available + content_type = None + if "content-type" in headers: + content_type = headers["content-type"].split(";")[0].strip().lower() + # Ouch, this server did not declare what it sent... + # Let's guess it's JSON... + # Also, since Autorest was considering that an empty body was a valid JSON, + # need that test as well.... 
+ else: + content_type = "application/json" + + if body_bytes: + return cls.deserialize_from_text(body_bytes, content_type) + return None + + +try: + basestring # type: ignore + unicode_str = unicode # type: ignore +except NameError: + basestring = str + unicode_str = str + +_LOGGER = logging.getLogger(__name__) + +try: + _long_type = long # type: ignore +except NameError: + _long_type = int + + +class UTC(datetime.tzinfo): + """Time Zone info for handling UTC""" + + def utcoffset(self, dt): + """UTF offset for UTC is 0.""" + return datetime.timedelta(0) + + def tzname(self, dt): + """Timestamp representation.""" + return "Z" + + def dst(self, dt): + """No daylight saving for UTC.""" + return datetime.timedelta(hours=1) + + +try: + from datetime import timezone as _FixedOffset # type: ignore +except ImportError: # Python 2.7 + + class _FixedOffset(datetime.tzinfo): # type: ignore + """Fixed offset in minutes east from UTC. + Copy/pasted from Python doc + :param datetime.timedelta offset: offset in timedelta format + """ + + def __init__(self, offset): + self.__offset = offset + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return str(self.__offset.total_seconds() / 3600) + + def __repr__(self): + return "".format(self.tzname(None)) + + def dst(self, dt): + return datetime.timedelta(0) + + def __getinitargs__(self): + return (self.__offset,) + + +try: + from datetime import timezone + + TZ_UTC = timezone.utc +except ImportError: + TZ_UTC = UTC() # type: ignore + +_FLATTEN = re.compile(r"(? None: + self.additional_properties: Dict[str, Any] = {} + for k in kwargs: + if k not in self._attribute_map: + _LOGGER.warning("%s is not a known attribute of class %s and will be ignored", k, self.__class__) + elif k in self._validation and self._validation[k].get("readonly", False): + _LOGGER.warning("Readonly attribute %s will be ignored in class %s", k, self.__class__) + else: + setattr(self, k, kwargs[k]) + + def __eq__(self, other: Any) -> bool: + """Compare objects by comparing all attributes.""" + if isinstance(other, self.__class__): + return self.__dict__ == other.__dict__ + return False + + def __ne__(self, other: Any) -> bool: + """Compare objects by comparing all attributes.""" + return not self.__eq__(other) + + def __str__(self) -> str: + return str(self.__dict__) + + @classmethod + def enable_additional_properties_sending(cls) -> None: + cls._attribute_map["additional_properties"] = {"key": "", "type": "{object}"} + + @classmethod + def is_xml_model(cls) -> bool: + try: + cls._xml_map # type: ignore + except AttributeError: + return False + return True + + @classmethod + def _create_xml_node(cls): + """Create XML node.""" + try: + xml_map = cls._xml_map # type: ignore + except AttributeError: + xml_map = {} + + return _create_xml_node(xml_map.get("name", cls.__name__), xml_map.get("prefix", None), xml_map.get("ns", None)) + + def serialize(self, keep_readonly: bool = False, **kwargs: Any) -> JSON: + """Return the JSON that would be sent to azure from this model. + + This is an alias to `as_dict(full_restapi_key_transformer, keep_readonly=False)`. + + If you want XML serialization, you can pass the kwargs is_xml=True. 
+ + :param bool keep_readonly: If you want to serialize the readonly attributes + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize(self, keep_readonly=keep_readonly, **kwargs) + + def as_dict( + self, + keep_readonly: bool = True, + key_transformer: Callable[[str, Dict[str, Any], Any], Any] = attribute_transformer, + **kwargs: Any + ) -> JSON: + """Return a dict that can be serialized using json.dump. + + Advanced usage might optionally use a callback as parameter: + + .. code::python + + def my_key_transformer(key, attr_desc, value): + return key + + Key is the attribute name used in Python. Attr_desc + is a dict of metadata. Currently contains 'type' with the + msrest type and 'key' with the RestAPI encoded key. + Value is the current value in this object. + + The string returned will be used to serialize the key. + If the return type is a list, this is considered hierarchical + result dict. + + See the three examples in this file: + + - attribute_transformer + - full_restapi_key_transformer + - last_restapi_key_transformer + + If you want XML serialization, you can pass the kwargs is_xml=True. + + :param function key_transformer: A key transformer function. + :returns: A dict JSON compatible object + :rtype: dict + """ + serializer = Serializer(self._infer_class_models()) + return serializer._serialize(self, key_transformer=key_transformer, keep_readonly=keep_readonly, **kwargs) + + @classmethod + def _infer_class_models(cls): + try: + str_models = cls.__module__.rsplit(".", 1)[0] + models = sys.modules[str_models] + client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + if cls.__name__ not in client_models: + raise ValueError("Not Autorest generated code") + except Exception: + # Assume it's not Autorest generated (tests?). Add ourselves as dependencies. + client_models = {cls.__name__: cls} + return client_models + + @classmethod + def deserialize(cls: Type[ModelType], data: Any, content_type: Optional[str] = None) -> ModelType: + """Parse a str using the RestAPI syntax and return a model. + + :param str data: A str using RestAPI structure. JSON by default. + :param str content_type: JSON by default, set application/xml if XML. + :returns: An instance of this model + :raises: DeserializationError if something went wrong + """ + deserializer = Deserializer(cls._infer_class_models()) + return deserializer(cls.__name__, data, content_type=content_type) + + @classmethod + def from_dict( + cls: Type[ModelType], + data: Any, + key_extractors: Optional[Callable[[str, Dict[str, Any], Any], Any]] = None, + content_type: Optional[str] = None, + ) -> ModelType: + """Parse a dict using given key extractor return a model. + + By default consider key + extractors (rest_key_case_insensitive_extractor, attribute_key_case_insensitive_extractor + and last_rest_key_case_insensitive_extractor) + + :param dict data: A dict using RestAPI structure + :param str content_type: JSON by default, set application/xml if XML. 
+ :returns: An instance of this model + :raises: DeserializationError if something went wrong + """ + deserializer = Deserializer(cls._infer_class_models()) + deserializer.key_extractors = ( # type: ignore + [ # type: ignore + attribute_key_case_insensitive_extractor, + rest_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + if key_extractors is None + else key_extractors + ) + return deserializer(cls.__name__, data, content_type=content_type) + + @classmethod + def _flatten_subtype(cls, key, objects): + if "_subtype_map" not in cls.__dict__: + return {} + result = dict(cls._subtype_map[key]) + for valuetype in cls._subtype_map[key].values(): + result.update(objects[valuetype]._flatten_subtype(key, objects)) + return result + + @classmethod + def _classify(cls, response, objects): + """Check the class _subtype_map for any child classes. + We want to ignore any inherited _subtype_maps. + Remove the polymorphic key from the initial data. + """ + for subtype_key in cls.__dict__.get("_subtype_map", {}).keys(): + subtype_value = None + + if not isinstance(response, ET.Element): + rest_api_response_key = cls._get_rest_key_parts(subtype_key)[-1] + subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None) + else: + subtype_value = xml_key_extractor(subtype_key, cls._attribute_map[subtype_key], response) + if subtype_value: + # Try to match base class. Can be class name only + # (bug to fix in Autorest to support x-ms-discriminator-name) + if cls.__name__ == subtype_value: + return cls + flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) + try: + return objects[flatten_mapping_type[subtype_value]] # type: ignore + except KeyError: + _LOGGER.warning( + "Subtype value %s has no mapping, use base class %s.", + subtype_value, + cls.__name__, + ) + break + else: + _LOGGER.warning("Discriminator %s is absent or null, use base class %s.", subtype_key, cls.__name__) + break + return cls + + @classmethod + def _get_rest_key_parts(cls, attr_key): + """Get the RestAPI key of this attr, split it and decode part + :param str attr_key: Attribute key must be in attribute_map. + :returns: A list of RestAPI part + :rtype: list + """ + rest_split_key = _FLATTEN.split(cls._attribute_map[attr_key]["key"]) + return [_decode_attribute_map_key(key_part) for key_part in rest_split_key] + + +def _decode_attribute_map_key(key): + """This decode a key in an _attribute_map to the actual key we want to look at + inside the received data. 
+ + :param str key: A key string from the generated code + """ + return key.replace("\\.", ".") + + +class Serializer(object): + """Request object model serializer.""" + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + _xml_basic_types_serializers = {"bool": lambda x: str(x).lower()} + days = {0: "Mon", 1: "Tue", 2: "Wed", 3: "Thu", 4: "Fri", 5: "Sat", 6: "Sun"} + months = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec", + } + validation = { + "min_length": lambda x, y: len(x) < y, + "max_length": lambda x, y: len(x) > y, + "minimum": lambda x, y: x < y, + "maximum": lambda x, y: x > y, + "minimum_ex": lambda x, y: x <= y, + "maximum_ex": lambda x, y: x >= y, + "min_items": lambda x, y: len(x) < y, + "max_items": lambda x, y: len(x) > y, + "pattern": lambda x, y: not re.match(y, x, re.UNICODE), + "unique": lambda x, y: len(x) != len(set(x)), + "multiple": lambda x, y: x % y != 0, + } + + def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None): + self.serialize_type = { + "iso-8601": Serializer.serialize_iso, + "rfc-1123": Serializer.serialize_rfc, + "unix-time": Serializer.serialize_unix, + "duration": Serializer.serialize_duration, + "date": Serializer.serialize_date, + "time": Serializer.serialize_time, + "decimal": Serializer.serialize_decimal, + "long": Serializer.serialize_long, + "bytearray": Serializer.serialize_bytearray, + "base64": Serializer.serialize_base64, + "object": self.serialize_object, + "[]": self.serialize_iter, + "{}": self.serialize_dict, + } + self.dependencies: Dict[str, Type[ModelType]] = dict(classes) if classes else {} + self.key_transformer = full_restapi_key_transformer + self.client_side_validation = True + + def _serialize(self, target_obj, data_type=None, **kwargs): + """Serialize data into a string according to type. + + :param target_obj: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str, dict + :raises: SerializationError if serialization fails. + """ + key_transformer = kwargs.get("key_transformer", self.key_transformer) + keep_readonly = kwargs.get("keep_readonly", False) + if target_obj is None: + return None + + attr_name = None + class_name = target_obj.__class__.__name__ + + if data_type: + return self.serialize_data(target_obj, data_type, **kwargs) + + if not hasattr(target_obj, "_attribute_map"): + data_type = type(target_obj).__name__ + if data_type in self.basic_types.values(): + return self.serialize_data(target_obj, data_type, **kwargs) + + # Force "is_xml" kwargs if we detect a XML model + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + is_xml_model_serialization = kwargs.setdefault("is_xml", target_obj.is_xml_model()) + + serialized = {} + if is_xml_model_serialization: + serialized = target_obj._create_xml_node() + try: + attributes = target_obj._attribute_map + for attr, attr_desc in attributes.items(): + attr_name = attr + if not keep_readonly and target_obj._validation.get(attr_name, {}).get("readonly", False): + continue + + if attr_name == "additional_properties" and attr_desc["key"] == "": + if target_obj.additional_properties is not None: + serialized.update(target_obj.additional_properties) + continue + try: + + orig_attr = getattr(target_obj, attr) + if is_xml_model_serialization: + pass # Don't provide "transformer" for XML for now. 
Keep "orig_attr" + else: # JSON + keys, orig_attr = key_transformer(attr, attr_desc.copy(), orig_attr) + keys = keys if isinstance(keys, list) else [keys] + + kwargs["serialization_ctxt"] = attr_desc + new_attr = self.serialize_data(orig_attr, attr_desc["type"], **kwargs) + + if is_xml_model_serialization: + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + xml_prefix = xml_desc.get("prefix", None) + xml_ns = xml_desc.get("ns", None) + if xml_desc.get("attr", False): + if xml_ns: + ET.register_namespace(xml_prefix, xml_ns) + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + serialized.set(xml_name, new_attr) # type: ignore + continue + if xml_desc.get("text", False): + serialized.text = new_attr # type: ignore + continue + if isinstance(new_attr, list): + serialized.extend(new_attr) # type: ignore + elif isinstance(new_attr, ET.Element): + # If the down XML has no XML/Name, we MUST replace the tag with the local tag. But keeping the namespaces. + if "name" not in getattr(orig_attr, "_xml_map", {}): + splitted_tag = new_attr.tag.split("}") + if len(splitted_tag) == 2: # Namespace + new_attr.tag = "}".join([splitted_tag[0], xml_name]) + else: + new_attr.tag = xml_name + serialized.append(new_attr) # type: ignore + else: # That's a basic type + # Integrate namespace if necessary + local_node = _create_xml_node(xml_name, xml_prefix, xml_ns) + local_node.text = unicode_str(new_attr) + serialized.append(local_node) # type: ignore + else: # JSON + for k in reversed(keys): # type: ignore + new_attr = {k: new_attr} + + _new_attr = new_attr + _serialized = serialized + for k in keys: # type: ignore + if k not in _serialized: + _serialized.update(_new_attr) # type: ignore + _new_attr = _new_attr[k] # type: ignore + _serialized = _serialized[k] + except ValueError as err: + if isinstance(err, SerializationError): + raise + + except (AttributeError, KeyError, TypeError) as err: + msg = "Attribute {} in object {} cannot be serialized.\n{}".format(attr_name, class_name, str(target_obj)) + raise_with_traceback(SerializationError, msg, err) + else: + return serialized + + def body(self, data, data_type, **kwargs): + """Serialize data intended for a request body. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: dict + :raises: SerializationError if serialization fails. + :raises: ValueError if data is None + """ + + # Just in case this is a dict + internal_data_type_str = data_type.strip("[]{}") + internal_data_type = self.dependencies.get(internal_data_type_str, None) + try: + is_xml_model_serialization = kwargs["is_xml"] + except KeyError: + if internal_data_type and issubclass(internal_data_type, Model): + is_xml_model_serialization = kwargs.setdefault("is_xml", internal_data_type.is_xml_model()) + else: + is_xml_model_serialization = False + if internal_data_type and not isinstance(internal_data_type, Enum): + try: + deserializer = Deserializer(self.dependencies) + # Since it's on serialization, it's almost sure that format is not JSON REST + # We're not able to deal with additional properties for now. 
+ deserializer.additional_properties_detection = False + if is_xml_model_serialization: + deserializer.key_extractors = [ # type: ignore + attribute_key_case_insensitive_extractor, + ] + else: + deserializer.key_extractors = [ + rest_key_case_insensitive_extractor, + attribute_key_case_insensitive_extractor, + last_rest_key_case_insensitive_extractor, + ] + data = deserializer._deserialize(data_type, data) + except DeserializationError as err: + raise_with_traceback(SerializationError, "Unable to build a model: " + str(err), err) + + return self._serialize(data, data_type, **kwargs) + + def url(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL path. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return output + + def query(self, name, data, data_type, **kwargs): + """Serialize data intended for a URL query. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :keyword bool skip_quote: Whether to skip quote the serialized result. + Defaults to False. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + # Treat the list aside, since we don't want to encode the div separator + if data_type.startswith("["): + internal_data_type = data_type[1:-1] + do_quote = not kwargs.get("skip_quote", False) + return str(self.serialize_iter(data, internal_data_type, do_quote=do_quote, **kwargs)) + + # Not a list, regular serialization + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + if kwargs.get("skip_quote") is True: + output = str(output) + else: + output = quote(str(output), safe="") + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def header(self, name, data, data_type, **kwargs): + """Serialize data intended for a request header. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :rtype: str + :raises: TypeError if serialization fails. + :raises: ValueError if data is None + """ + try: + if data_type in ["[str]"]: + data = ["" if d is None else d for d in data] + + output = self.serialize_data(data, data_type, **kwargs) + if data_type == "bool": + output = json.dumps(output) + except SerializationError: + raise TypeError("{} must be type {}.".format(name, data_type)) + else: + return str(output) + + def serialize_data(self, data, data_type, **kwargs): + """Serialize generic data according to supplied data type. + + :param data: The data to be serialized. + :param str data_type: The type to be serialized from. + :param bool required: Whether it's essential that the data not be + empty or None + :raises: AttributeError if required data is None. + :raises: ValueError if data is None + :raises: SerializationError if serialization fails. 
+ """ + if data is None: + raise ValueError("No value for given attribute") + + try: + if data is AzureCoreNull: + return None + if data_type in self.basic_types.values(): + return self.serialize_basic(data, data_type, **kwargs) + + elif data_type in self.serialize_type: + return self.serialize_type[data_type](data, **kwargs) + + # If dependencies is empty, try with current data class + # It has to be a subclass of Enum anyway + enum_type = self.dependencies.get(data_type, data.__class__) + if issubclass(enum_type, Enum): + return Serializer.serialize_enum(data, enum_obj=enum_type) + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.serialize_type: + return self.serialize_type[iter_type](data, data_type[1:-1], **kwargs) + + except (ValueError, TypeError) as err: + msg = "Unable to serialize value: {!r} as type: {!r}." + raise_with_traceback(SerializationError, msg.format(data, data_type), err) + else: + return self._serialize(data, **kwargs) + + @classmethod + def _get_custom_serializers(cls, data_type, **kwargs): + custom_serializer = kwargs.get("basic_types_serializers", {}).get(data_type) + if custom_serializer: + return custom_serializer + if kwargs.get("is_xml", False): + return cls._xml_basic_types_serializers.get(data_type) + + @classmethod + def serialize_basic(cls, data, data_type, **kwargs): + """Serialize basic builting data type. + Serializes objects to str, int, float or bool. + + Possible kwargs: + - basic_types_serializers dict[str, callable] : If set, use the callable as serializer + - is_xml bool : If set, use xml_basic_types_serializers + + :param data: Object to be serialized. + :param str data_type: Type of object in the iterable. + """ + custom_serializer = cls._get_custom_serializers(data_type, **kwargs) + if custom_serializer: + return custom_serializer(data) + if data_type == "str": + return cls.serialize_unicode(data) + return eval(data_type)(data) # nosec + + @classmethod + def serialize_unicode(cls, data): + """Special handling for serializing unicode strings in Py2. + Encode to UTF-8 if unicode, otherwise handle as a str. + + :param data: Object to be serialized. + :rtype: str + """ + try: # If I received an enum, return its value + return data.value + except AttributeError: + pass + + try: + if isinstance(data, unicode): # type: ignore + # Don't change it, JSON and XML ElementTree are totally able + # to serialize correctly u'' strings + return data + except NameError: + return str(data) + else: + return str(data) + + def serialize_iter(self, data, iter_type, div=None, **kwargs): + """Serialize iterable. + + Supported kwargs: + - serialization_ctxt dict : The current entry of _attribute_map, or same format. + serialization_ctxt['type'] should be same as data_type. + - is_xml bool : If set, serialize as XML + + :param list attr: Object to be serialized. + :param str iter_type: Type of object in the iterable. + :param bool required: Whether the objects in the iterable must + not be None or empty. + :param str div: If set, this str will be used to combine the elements + in the iterable into a combined string. Default is 'None'. + :keyword bool do_quote: Whether to quote the serialized result of each iterable element. + Defaults to False. 
+ :rtype: list, str + """ + if isinstance(data, str): + raise SerializationError("Refuse str type as a valid iter type.") + + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + is_xml = kwargs.get("is_xml", False) + + serialized = [] + for d in data: + try: + serialized.append(self.serialize_data(d, iter_type, **kwargs)) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized.append(None) + + if kwargs.get("do_quote", False): + serialized = ["" if s is None else quote(str(s), safe="") for s in serialized] + + if div: + serialized = ["" if s is None else str(s) for s in serialized] + serialized = div.join(serialized) + + if "xml" in serialization_ctxt or is_xml: + # XML serialization is more complicated + xml_desc = serialization_ctxt.get("xml", {}) + xml_name = xml_desc.get("name") + if not xml_name: + xml_name = serialization_ctxt["key"] + + # Create a wrap node if necessary (use the fact that Element and list have "append") + is_wrapped = xml_desc.get("wrapped", False) + node_name = xml_desc.get("itemsName", xml_name) + if is_wrapped: + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + else: + final_result = [] + # All list elements to "local_node" + for el in serialized: + if isinstance(el, ET.Element): + el_node = el + else: + el_node = _create_xml_node(node_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + if el is not None: # Otherwise it writes "None" :-p + el_node.text = str(el) + final_result.append(el_node) + return final_result + return serialized + + def serialize_dict(self, attr, dict_type, **kwargs): + """Serialize a dictionary of objects. + + :param dict attr: Object to be serialized. + :param str dict_type: Type of object in the dictionary. + :param bool required: Whether the objects in the dictionary must + not be None or empty. + :rtype: dict + """ + serialization_ctxt = kwargs.get("serialization_ctxt", {}) + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_data(value, dict_type, **kwargs) + except ValueError as err: + if isinstance(err, SerializationError): + raise + serialized[self.serialize_unicode(key)] = None + + if "xml" in serialization_ctxt: + # XML serialization is more complicated + xml_desc = serialization_ctxt["xml"] + xml_name = xml_desc["name"] + + final_result = _create_xml_node(xml_name, xml_desc.get("prefix", None), xml_desc.get("ns", None)) + for key, value in serialized.items(): + ET.SubElement(final_result, key).text = value + return final_result + + return serialized + + def serialize_object(self, attr, **kwargs): + """Serialize a generic object. + This will be handled as a dictionary. If object passed in is not + a basic type (str, int, float, dict, list) it will simply be + cast to str. + + :param dict attr: Object to be serialized. 
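For JSON payloads, serialize_iter and serialize_dict simply recurse into serialize_data per element; div and do_quote only matter when the result is bound for a URL, and the XML branch is taken only when the serialization context carries an "xml" entry. A hedged sketch (same assumed import path and no-argument constructor):

    from azure.ai.ml._restclient.v2023_08_01_preview._serialization import Serializer  # assumed path

    s = Serializer()
    print(s.serialize_iter([1, 2, 3], "int"))              # [1, 2, 3]
    print(s.serialize_iter(["a b", "c"], "str", div=","))  # a b,c  (no quoting unless do_quote=True)
    print(s.serialize_dict({"region": "eastus"}, "str"))   # {'region': 'eastus'}
    # A bare str is rejected rather than being iterated character by character.
    try:
        s.serialize_iter("oops", "str")
    except Exception as err:
        print(type(err).__name__)                          # SerializationError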
+ :rtype: dict or str + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + return attr + obj_type = type(attr) + if obj_type in self.basic_types: + return self.serialize_basic(attr, self.basic_types[obj_type], **kwargs) + if obj_type is _long_type: + return self.serialize_long(attr) + if obj_type is unicode_str: + return self.serialize_unicode(attr) + if obj_type is datetime.datetime: + return self.serialize_iso(attr) + if obj_type is datetime.date: + return self.serialize_date(attr) + if obj_type is datetime.time: + return self.serialize_time(attr) + if obj_type is datetime.timedelta: + return self.serialize_duration(attr) + if obj_type is decimal.Decimal: + return self.serialize_decimal(attr) + + # If it's a model or I know this dependency, serialize as a Model + elif obj_type in self.dependencies.values() or isinstance(attr, Model): + return self._serialize(attr) + + if obj_type == dict: + serialized = {} + for key, value in attr.items(): + try: + serialized[self.serialize_unicode(key)] = self.serialize_object(value, **kwargs) + except ValueError: + serialized[self.serialize_unicode(key)] = None + return serialized + + if obj_type == list: + serialized = [] + for obj in attr: + try: + serialized.append(self.serialize_object(obj, **kwargs)) + except ValueError: + pass + return serialized + return str(attr) + + @staticmethod + def serialize_enum(attr, enum_obj=None): + try: + result = attr.value + except AttributeError: + result = attr + try: + enum_obj(result) # type: ignore + return result + except ValueError: + for enum_value in enum_obj: # type: ignore + if enum_value.value.lower() == str(attr).lower(): + return enum_value.value + error = "{!r} is not valid value for enum {!r}" + raise SerializationError(error.format(attr, enum_obj)) + + @staticmethod + def serialize_bytearray(attr, **kwargs): + """Serialize bytearray into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + return b64encode(attr).decode() + + @staticmethod + def serialize_base64(attr, **kwargs): + """Serialize str into base-64 string. + + :param attr: Object to be serialized. + :rtype: str + """ + encoded = b64encode(attr).decode("ascii") + return encoded.strip("=").replace("+", "-").replace("/", "_") + + @staticmethod + def serialize_decimal(attr, **kwargs): + """Serialize Decimal object to float. + + :param attr: Object to be serialized. + :rtype: float + """ + return float(attr) + + @staticmethod + def serialize_long(attr, **kwargs): + """Serialize long (Py2) or int (Py3). + + :param attr: Object to be serialized. + :rtype: int/long + """ + return _long_type(attr) + + @staticmethod + def serialize_date(attr, **kwargs): + """Serialize Date object into ISO-8601 formatted string. + + :param Date attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_date(attr) + t = "{:04}-{:02}-{:02}".format(attr.year, attr.month, attr.day) + return t + + @staticmethod + def serialize_time(attr, **kwargs): + """Serialize Time object into ISO-8601 formatted string. + + :param datetime.time attr: Object to be serialized. + :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_time(attr) + t = "{:02}:{:02}:{:02}".format(attr.hour, attr.minute, attr.second) + if attr.microsecond: + t += ".{:02}".format(attr.microsecond) + return t + + @staticmethod + def serialize_duration(attr, **kwargs): + """Serialize TimeDelta object into ISO-8601 formatted string. + + :param TimeDelta attr: Object to be serialized. 
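The static helpers above cover the leaf formats: enum values are matched case-insensitively against the member values, base64 is emitted in the unpadded URL-safe variant, and dates and decimals are normalized to ISO strings and floats. A sketch of the observable behavior (the Color enum is a hypothetical stand-in; import path assumed as before):

    import datetime
    import decimal
    from enum import Enum

    from azure.ai.ml._restclient.v2023_08_01_preview._serialization import Serializer  # assumed path

    class Color(Enum):  # hypothetical enum, for illustration only
        RED = "red"

    print(Serializer.serialize_enum("Red", enum_obj=Color))       # red  (case-insensitive match)
    print(Serializer.serialize_base64(b"hello world"))            # aGVsbG8gd29ybGQ  (unpadded, URL-safe)
    print(Serializer.serialize_date(datetime.date(2023, 8, 1)))   # 2023-08-01
    print(Serializer.serialize_decimal(decimal.Decimal("1.25")))  # 1.25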
+ :rtype: str + """ + if isinstance(attr, str): + attr = isodate.parse_duration(attr) + return isodate.duration_isoformat(attr) + + @staticmethod + def serialize_rfc(attr, **kwargs): + """Serialize Datetime object into RFC-1123 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: TypeError if format invalid. + """ + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + except AttributeError: + raise TypeError("RFC1123 object must be valid Datetime object.") + + return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT".format( + Serializer.days[utc.tm_wday], + utc.tm_mday, + Serializer.months[utc.tm_mon], + utc.tm_year, + utc.tm_hour, + utc.tm_min, + utc.tm_sec, + ) + + @staticmethod + def serialize_iso(attr, **kwargs): + """Serialize Datetime object into ISO-8601 formatted string. + + :param Datetime attr: Object to be serialized. + :rtype: str + :raises: SerializationError if format invalid. + """ + if isinstance(attr, str): + attr = isodate.parse_datetime(attr) + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + utc = attr.utctimetuple() + if utc.tm_year > 9999 or utc.tm_year < 1: + raise OverflowError("Hit max or min date") + + microseconds = str(attr.microsecond).rjust(6, "0").rstrip("0").ljust(3, "0") + if microseconds: + microseconds = "." + microseconds + date = "{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format( + utc.tm_year, utc.tm_mon, utc.tm_mday, utc.tm_hour, utc.tm_min, utc.tm_sec + ) + return date + microseconds + "Z" + except (ValueError, OverflowError) as err: + msg = "Unable to serialize datetime object." + raise_with_traceback(SerializationError, msg, err) + except AttributeError as err: + msg = "ISO-8601 object must be valid Datetime object." + raise_with_traceback(TypeError, msg, err) + + @staticmethod + def serialize_unix(attr, **kwargs): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param Datetime attr: Object to be serialized. + :rtype: int + :raises: SerializationError if format invalid + """ + if isinstance(attr, int): + return attr + try: + if not attr.tzinfo: + _LOGGER.warning("Datetime with no tzinfo will be considered UTC.") + return int(calendar.timegm(attr.utctimetuple())) + except AttributeError: + raise TypeError("Unix time object must be valid Datetime object.") + + +def rest_key_extractor(attr, attr_desc, data): + key = attr_desc["key"] + working_data = data + + while "." in key: + # Need the cast, as for some reasons "split" is typed as list[str | Any] + dict_keys = cast(List[str], _FLATTEN.split(key)) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = working_data.get(working_key, data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = ".".join(dict_keys[1:]) + + return working_data.get(key) + + +def rest_key_case_insensitive_extractor(attr, attr_desc, data): + key = attr_desc["key"] + working_data = data + + while "." 
in key: + dict_keys = _FLATTEN.split(key) + if len(dict_keys) == 1: + key = _decode_attribute_map_key(dict_keys[0]) + break + working_key = _decode_attribute_map_key(dict_keys[0]) + working_data = attribute_key_case_insensitive_extractor(working_key, None, working_data) + if working_data is None: + # If at any point while following flatten JSON path see None, it means + # that all properties under are None as well + # https://github.com/Azure/msrest-for-python/issues/197 + return None + key = ".".join(dict_keys[1:]) + + if working_data: + return attribute_key_case_insensitive_extractor(key, None, working_data) + + +def last_rest_key_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key.""" + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_extractor(dict_keys[-1], None, data) + + +def last_rest_key_case_insensitive_extractor(attr, attr_desc, data): + """Extract the attribute in "data" based on the last part of the JSON path key. + + This is the case insensitive version of "last_rest_key_extractor" + """ + key = attr_desc["key"] + dict_keys = _FLATTEN.split(key) + return attribute_key_case_insensitive_extractor(dict_keys[-1], None, data) + + +def attribute_key_extractor(attr, _, data): + return data.get(attr) + + +def attribute_key_case_insensitive_extractor(attr, _, data): + found_key = None + lower_attr = attr.lower() + for key in data: + if lower_attr == key.lower(): + found_key = key + break + + return data.get(found_key) + + +def _extract_name_from_internal_type(internal_type): + """Given an internal type XML description, extract correct XML name with namespace. + + :param dict internal_type: An model type + :rtype: tuple + :returns: A tuple XML name + namespace dict + """ + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + xml_name = internal_type_xml_map.get("name", internal_type.__name__) + xml_ns = internal_type_xml_map.get("ns", None) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + return xml_name + + +def xml_key_extractor(attr, attr_desc, data): + if isinstance(data, dict): + return None + + # Test if this model is XML ready first + if not isinstance(data, ET.Element): + return None + + xml_desc = attr_desc.get("xml", {}) + xml_name = xml_desc.get("name", attr_desc["key"]) + + # Look for a children + is_iter_type = attr_desc["type"].startswith("[") + is_wrapped = xml_desc.get("wrapped", False) + internal_type = attr_desc.get("internalType", None) + internal_type_xml_map = getattr(internal_type, "_xml_map", {}) + + # Integrate namespace if necessary + xml_ns = xml_desc.get("ns", internal_type_xml_map.get("ns", None)) + if xml_ns: + xml_name = "{{{}}}{}".format(xml_ns, xml_name) + + # If it's an attribute, that's simple + if xml_desc.get("attr", False): + return data.get(xml_name) + + # If it's x-ms-text, that's simple too + if xml_desc.get("text", False): + return data.text + + # Scenario where I take the local name: + # - Wrapped node + # - Internal type is an enum (considered basic types) + # - Internal type has no XML/Name node + if is_wrapped or (internal_type and (issubclass(internal_type, Enum) or "name" not in internal_type_xml_map)): + children = data.findall(xml_name) + # If internal type has a local name and it's not a list, I use that name + elif not is_iter_type and internal_type and "name" in internal_type_xml_map: + xml_name = _extract_name_from_internal_type(internal_type) + children = data.findall(xml_name) + # That's an array + else: 
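The extractor functions are what allow a flattened attribute-map key such as "properties.description" to be pulled out of nested response JSON, with None at any intermediate node short-circuiting to None; the attribute extractors ignore the JSON path and match a single key, optionally case-insensitively. A hedged sketch, assuming these module-level functions are importable from the same module; the attr_desc dict is a hand-written stand-in for an _attribute_map entry:

    from azure.ai.ml._restclient.v2023_08_01_preview._serialization import (  # assumed path
        attribute_key_case_insensitive_extractor,
        rest_key_extractor,
    )

    attr_desc = {"key": "properties.description", "type": "str"}
    data = {"properties": {"description": "my workspace"}}

    print(rest_key_extractor("description", attr_desc, data))                  # my workspace
    # A None intermediate node means every property underneath it is None as well.
    print(rest_key_extractor("description", attr_desc, {"properties": None}))  # None
    print(attribute_key_case_insensitive_extractor("Name", None, {"name": "ws1"}))  # ws1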
+ if internal_type: # Complex type, ignore itemsName and use the complex type name + items_name = _extract_name_from_internal_type(internal_type) + else: + items_name = xml_desc.get("itemsName", xml_name) + children = data.findall(items_name) + + if len(children) == 0: + if is_iter_type: + if is_wrapped: + return None # is_wrapped no node, we want None + else: + return [] # not wrapped, assume empty list + return None # Assume it's not there, maybe an optional node. + + # If is_iter_type and not wrapped, return all found children + if is_iter_type: + if not is_wrapped: + return children + else: # Iter and wrapped, should have found one node only (the wrap one) + if len(children) != 1: + raise DeserializationError( + "Tried to deserialize an array not wrapped, and found several nodes '{}'. Maybe you should declare this array as wrapped?".format( + xml_name + ) + ) + return list(children[0]) # Might be empty list and that's ok. + + # Here it's not a itertype, we should have found one element only or empty + if len(children) > 1: + raise DeserializationError("Find several XML '{}' where it was not expected".format(xml_name)) + return children[0] + + +class Deserializer(object): + """Response object model deserializer. + + :param dict classes: Class type dictionary for deserializing complex types. + :ivar list key_extractors: Ordered list of extractors to be used by this deserializer. + """ + + basic_types = {str: "str", int: "int", bool: "bool", float: "float"} + + valid_date = re.compile(r"\d{4}[-]\d{2}[-]\d{2}T\d{2}:\d{2}:\d{2}" r"\.?\d*Z?[-+]?[\d{2}]?:?[\d{2}]?") + + def __init__(self, classes: Optional[Mapping[str, Type[ModelType]]] = None): + self.deserialize_type = { + "iso-8601": Deserializer.deserialize_iso, + "rfc-1123": Deserializer.deserialize_rfc, + "unix-time": Deserializer.deserialize_unix, + "duration": Deserializer.deserialize_duration, + "date": Deserializer.deserialize_date, + "time": Deserializer.deserialize_time, + "decimal": Deserializer.deserialize_decimal, + "long": Deserializer.deserialize_long, + "bytearray": Deserializer.deserialize_bytearray, + "base64": Deserializer.deserialize_base64, + "object": self.deserialize_object, + "[]": self.deserialize_iter, + "{}": self.deserialize_dict, + } + self.deserialize_expected_types = { + "duration": (isodate.Duration, datetime.timedelta), + "iso-8601": (datetime.datetime), + } + self.dependencies: Dict[str, Type[ModelType]] = dict(classes) if classes else {} + self.key_extractors = [rest_key_extractor, xml_key_extractor] + # Additional properties only works if the "rest_key_extractor" is used to + # extract the keys. Making it to work whatever the key extractor is too much + # complicated, with no real scenario for now. + # So adding a flag to disable additional properties detection. This flag should be + # used if your expect the deserialization to NOT come from a JSON REST syntax. + # Otherwise, result are unexpected + self.additional_properties_detection = True + + def __call__(self, target_obj, response_data, content_type=None): + """Call the deserializer to process a REST response. + + :param str target_obj: Target data type to deserialize to. + :param requests.Response response_data: REST response object. + :param str content_type: Swagger "produces" if available. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. 
+ """ + data = self._unpack_content(response_data, content_type) + return self._deserialize(target_obj, data) + + def _deserialize(self, target_obj, data): + """Call the deserializer on a model. + + Data needs to be already deserialized as JSON or XML ElementTree + + :param str target_obj: Target data type to deserialize to. + :param object data: Object to deserialize. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + # This is already a model, go recursive just in case + if hasattr(data, "_attribute_map"): + constants = [name for name, config in getattr(data, "_validation", {}).items() if config.get("constant")] + try: + for attr, mapconfig in data._attribute_map.items(): + if attr in constants: + continue + value = getattr(data, attr) + if value is None: + continue + local_type = mapconfig["type"] + internal_data_type = local_type.strip("[]{}") + if internal_data_type not in self.dependencies or isinstance(internal_data_type, Enum): + continue + setattr(data, attr, self._deserialize(local_type, value)) + return data + except AttributeError: + return + + response, class_name = self._classify_target(target_obj, data) + + if isinstance(response, basestring): + return self.deserialize_data(data, response) + elif isinstance(response, type) and issubclass(response, Enum): + return self.deserialize_enum(data, response) + + if data is None: + return data + try: + attributes = response._attribute_map # type: ignore + d_attrs = {} + for attr, attr_desc in attributes.items(): + # Check empty string. If it's not empty, someone has a real "additionalProperties"... + if attr == "additional_properties" and attr_desc["key"] == "": + continue + raw_value = None + # Enhance attr_desc with some dynamic data + attr_desc = attr_desc.copy() # Do a copy, do not change the real one + internal_data_type = attr_desc["type"].strip("[]{}") + if internal_data_type in self.dependencies: + attr_desc["internalType"] = self.dependencies[internal_data_type] + + for key_extractor in self.key_extractors: + found_value = key_extractor(attr, attr_desc, data) + if found_value is not None: + if raw_value is not None and raw_value != found_value: + msg = ( + "Ignoring extracted value '%s' from %s for key '%s'" + " (duplicate extraction, follow extractors order)" + ) + _LOGGER.warning(msg, found_value, key_extractor, attr) + continue + raw_value = found_value + + value = self.deserialize_data(raw_value, attr_desc["type"]) + d_attrs[attr] = value + except (AttributeError, TypeError, KeyError) as err: + msg = "Unable to deserialize to object: " + class_name # type: ignore + raise_with_traceback(DeserializationError, msg, err) + else: + additional_properties = self._build_additional_properties(attributes, data) + return self._instantiate_model(response, d_attrs, additional_properties) + + def _build_additional_properties(self, attribute_map, data): + if not self.additional_properties_detection: + return None + if "additional_properties" in attribute_map and attribute_map.get("additional_properties", {}).get("key") != "": + # Check empty string. 
If it's not empty, someone has a real "additionalProperties" + return None + if isinstance(data, ET.Element): + data = {el.tag: el.text for el in data} + + known_keys = { + _decode_attribute_map_key(_FLATTEN.split(desc["key"])[0]) + for desc in attribute_map.values() + if desc["key"] != "" + } + present_keys = set(data.keys()) + missing_keys = present_keys - known_keys + return {key: data[key] for key in missing_keys} + + def _classify_target(self, target, data): + """Check to see whether the deserialization target object can + be classified into a subclass. + Once classification has been determined, initialize object. + + :param str target: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + """ + if target is None: + return None, None + + if isinstance(target, basestring): + try: + target = self.dependencies[target] + except KeyError: + return target, target + + try: + target = target._classify(data, self.dependencies) + except AttributeError: + pass # Target is not a Model, no classify + return target, target.__class__.__name__ # type: ignore + + def failsafe_deserialize(self, target_obj, data, content_type=None): + """Ignores any errors encountered in deserialization, + and falls back to not deserializing the object. Recommended + for use in error deserialization, as we want to return the + HttpResponseError to users, and not have them deal with + a deserialization error. + + :param str target_obj: The target object type to deserialize to. + :param str/dict data: The response data to deserialize. + :param str content_type: Swagger "produces" if available. + """ + try: + return self(target_obj, data, content_type=content_type) + except: + _LOGGER.debug( + "Ran into a deserialization error. Ignoring since this is failsafe deserialization", exc_info=True + ) + return None + + @staticmethod + def _unpack_content(raw_data, content_type=None): + """Extract the correct structure for deserialization. + + If raw_data is a PipelineResponse, try to extract the result of RawDeserializer. + if we can't, raise. Your Pipeline should have a RawDeserializer. + + If not a pipeline response and raw_data is bytes or string, use content-type + to decode it. If no content-type, try JSON. + + If raw_data is something else, bypass all logic and return it directly. + + :param raw_data: Data to be processed. + :param content_type: How to parse if raw_data is a string/bytes. + :raises JSONDecodeError: If JSON is requested and parsing is impossible. + :raises UnicodeDecodeError: If bytes is not UTF8 + """ + # Assume this is enough to detect a Pipeline Response without importing it + context = getattr(raw_data, "context", {}) + if context: + if RawDeserializer.CONTEXT_NAME in context: + return context[RawDeserializer.CONTEXT_NAME] + raise ValueError("This pipeline didn't have the RawDeserializer policy; can't deserialize") + + # Assume this is enough to recognize universal_http.ClientResponse without importing it + if hasattr(raw_data, "body"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text(), raw_data.headers) + + # Assume this enough to recognize requests.Response without importing it. 
+ if hasattr(raw_data, "_content_consumed"): + return RawDeserializer.deserialize_from_http_generics(raw_data.text, raw_data.headers) + + if isinstance(raw_data, (basestring, bytes)) or hasattr(raw_data, "read"): + return RawDeserializer.deserialize_from_text(raw_data, content_type) # type: ignore + return raw_data + + def _instantiate_model(self, response, attrs, additional_properties=None): + """Instantiate a response model passing in deserialized args. + + :param response: The response model class. + :param d_attrs: The deserialized response attributes. + """ + if callable(response): + subtype = getattr(response, "_subtype_map", {}) + try: + readonly = [k for k, v in response._validation.items() if v.get("readonly")] + const = [k for k, v in response._validation.items() if v.get("constant")] + kwargs = {k: v for k, v in attrs.items() if k not in subtype and k not in readonly + const} + response_obj = response(**kwargs) + for attr in readonly: + setattr(response_obj, attr, attrs.get(attr)) + if additional_properties: + response_obj.additional_properties = additional_properties + return response_obj + except TypeError as err: + msg = "Unable to deserialize {} into model {}. ".format(kwargs, response) # type: ignore + raise DeserializationError(msg + str(err)) + else: + try: + for attr, value in attrs.items(): + setattr(response, attr, value) + return response + except Exception as exp: + msg = "Unable to populate response model. " + msg += "Type: {}, Error: {}".format(type(response), exp) + raise DeserializationError(msg) + + def deserialize_data(self, data, data_type): + """Process data for deserialization according to data type. + + :param str data: The response string to be deserialized. + :param str data_type: The type to deserialize to. + :raises: DeserializationError if deserialization fails. + :return: Deserialized object. + """ + if data is None: + return data + + try: + if not data_type: + return data + if data_type in self.basic_types.values(): + return self.deserialize_basic(data, data_type) + if data_type in self.deserialize_type: + if isinstance(data, self.deserialize_expected_types.get(data_type, tuple())): + return data + + is_a_text_parsing_type = lambda x: x not in ["object", "[]", r"{}"] + if isinstance(data, ET.Element) and is_a_text_parsing_type(data_type) and not data.text: + return None + data_val = self.deserialize_type[data_type](data) + return data_val + + iter_type = data_type[0] + data_type[-1] + if iter_type in self.deserialize_type: + return self.deserialize_type[iter_type](data, data_type[1:-1]) + + obj_type = self.dependencies[data_type] + if issubclass(obj_type, Enum): + if isinstance(data, ET.Element): + data = data.text + return self.deserialize_enum(data, obj_type) + + except (ValueError, TypeError, AttributeError) as err: + msg = "Unable to deserialize response data." + msg += " Data: {}, {}".format(data, data_type) + raise_with_traceback(DeserializationError, msg, err) + else: + return self._deserialize(obj_type, data) + + def deserialize_iter(self, attr, iter_type): + """Deserialize an iterable. + + :param list attr: Iterable to be deserialized. + :param str iter_type: The type of object in the iterable. 
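deserialize_data is the dispatch mirror of serialize_data, and failsafe_deserialize wraps the whole pipeline so that a malformed error payload degrades to None instead of masking the original HttpResponseError. A hedged sketch (assumed import path; the Deserializer is built without a model mapping here):

    from azure.ai.ml._restclient.v2023_08_01_preview._serialization import Deserializer  # assumed path

    d = Deserializer()
    print(d.deserialize_data("42", "int"))                         # 42
    print(d.deserialize_data("2023-08-01T12:30:00Z", "iso-8601"))  # 2023-08-01 12:30:00+00:00
    print(d.deserialize_data(["1", "2"], "[int]"))                 # [1, 2]
    # failsafe_deserialize logs the failure at debug level and returns None.
    print(d.failsafe_deserialize("int", "not-a-number"))           # None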
+ :rtype: list + """ + if attr is None: + return None + if isinstance(attr, ET.Element): # If I receive an element here, get the children + attr = list(attr) + if not isinstance(attr, (list, set)): + raise DeserializationError("Cannot deserialize as [{}] an object of type {}".format(iter_type, type(attr))) + return [self.deserialize_data(a, iter_type) for a in attr] + + def deserialize_dict(self, attr, dict_type): + """Deserialize a dictionary. + + :param dict/list attr: Dictionary to be deserialized. Also accepts + a list of key, value pairs. + :param str dict_type: The object type of the items in the dictionary. + :rtype: dict + """ + if isinstance(attr, list): + return {x["key"]: self.deserialize_data(x["value"], dict_type) for x in attr} + + if isinstance(attr, ET.Element): + # Transform value into {"Key": "value"} + attr = {el.tag: el.text for el in attr} + return {k: self.deserialize_data(v, dict_type) for k, v in attr.items()} + + def deserialize_object(self, attr, **kwargs): + """Deserialize a generic object. + This will be handled as a dictionary. + + :param dict attr: Dictionary to be deserialized. + :rtype: dict + :raises: TypeError if non-builtin datatype encountered. + """ + if attr is None: + return None + if isinstance(attr, ET.Element): + # Do no recurse on XML, just return the tree as-is + return attr + if isinstance(attr, basestring): + return self.deserialize_basic(attr, "str") + obj_type = type(attr) + if obj_type in self.basic_types: + return self.deserialize_basic(attr, self.basic_types[obj_type]) + if obj_type is _long_type: + return self.deserialize_long(attr) + + if obj_type == dict: + deserialized = {} + for key, value in attr.items(): + try: + deserialized[key] = self.deserialize_object(value, **kwargs) + except ValueError: + deserialized[key] = None + return deserialized + + if obj_type == list: + deserialized = [] + for obj in attr: + try: + deserialized.append(self.deserialize_object(obj, **kwargs)) + except ValueError: + pass + return deserialized + + else: + error = "Cannot deserialize generic object with type: " + raise TypeError(error + str(obj_type)) + + def deserialize_basic(self, attr, data_type): + """Deserialize basic builtin data type from string. + Will attempt to convert to str, int, float and bool. + This function will also accept '1', '0', 'true' and 'false' as + valid bool values. + + :param str attr: response string to be deserialized. + :param str data_type: deserialization data type. + :rtype: str, int, float or bool + :raises: TypeError if string format is not valid. + """ + # If we're here, data is supposed to be a basic type. + # If it's still an XML node, take the text + if isinstance(attr, ET.Element): + attr = attr.text + if not attr: + if data_type == "str": + # None or '', node is empty string. + return "" + else: + # None or '', node with a strong type is None. + # Don't try to model "empty bool" or "empty int" + return None + + if data_type == "bool": + if attr in [True, False, 1, 0]: + return bool(attr) + elif isinstance(attr, basestring): + if attr.lower() in ["true", "1"]: + return True + elif attr.lower() in ["false", "0"]: + return False + raise TypeError("Invalid boolean value: {}".format(attr)) + + if data_type == "str": + return self.deserialize_unicode(attr) + return eval(data_type)(attr) # nosec + + @staticmethod + def deserialize_unicode(data): + """Preserve unicode objects in Python 2, otherwise return data + as a string. + + :param str data: response string to be deserialized. 
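deserialize_basic is intentionally permissive for booleans, accepting "true"/"false"/"1"/"0" in any case and raising TypeError for anything else; the other basic types fall through to the builtin constructors. A short sketch under the same assumed import path:

    from azure.ai.ml._restclient.v2023_08_01_preview._serialization import Deserializer  # assumed path

    d = Deserializer()
    print(d.deserialize_basic("True", "bool"))   # True
    print(d.deserialize_basic("0", "bool"))      # False
    print(d.deserialize_basic("3.5", "float"))   # 3.5
    try:
        d.deserialize_basic("yes", "bool")
    except TypeError as err:
        print(err)                               # Invalid boolean value: yes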
+ :rtype: str or unicode + """ + # We might be here because we have an enum modeled as string, + # and we try to deserialize a partial dict with enum inside + if isinstance(data, Enum): + return data + + # Consider this is real string + try: + if isinstance(data, unicode): # type: ignore + return data + except NameError: + return str(data) + else: + return str(data) + + @staticmethod + def deserialize_enum(data, enum_obj): + """Deserialize string into enum object. + + If the string is not a valid enum value it will be returned as-is + and a warning will be logged. + + :param str data: Response string to be deserialized. If this value is + None or invalid it will be returned as-is. + :param Enum enum_obj: Enum object to deserialize to. + :rtype: Enum + """ + if isinstance(data, enum_obj) or data is None: + return data + if isinstance(data, Enum): + data = data.value + if isinstance(data, int): + # Workaround. We might consider remove it in the future. + # https://github.com/Azure/azure-rest-api-specs/issues/141 + try: + return list(enum_obj.__members__.values())[data] + except IndexError: + error = "{!r} is not a valid index for enum {!r}" + raise DeserializationError(error.format(data, enum_obj)) + try: + return enum_obj(str(data)) + except ValueError: + for enum_value in enum_obj: + if enum_value.value.lower() == str(data).lower(): + return enum_value + # We don't fail anymore for unknown value, we deserialize as a string + _LOGGER.warning("Deserializer is not able to find %s as valid enum in %s", data, enum_obj) + return Deserializer.deserialize_unicode(data) + + @staticmethod + def deserialize_bytearray(attr): + """Deserialize string into bytearray. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return bytearray(b64decode(attr)) # type: ignore + + @staticmethod + def deserialize_base64(attr): + """Deserialize base64 encoded string into string. + + :param str attr: response string to be deserialized. + :rtype: bytearray + :raises: TypeError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + padding = "=" * (3 - (len(attr) + 3) % 4) # type: ignore + attr = attr + padding # type: ignore + encoded = attr.replace("-", "+").replace("_", "/") + return b64decode(encoded) + + @staticmethod + def deserialize_decimal(attr): + """Deserialize string into Decimal object. + + :param str attr: response string to be deserialized. + :rtype: Decimal + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + return decimal.Decimal(attr) # type: ignore + except decimal.DecimalException as err: + msg = "Invalid decimal {}".format(attr) + raise_with_traceback(DeserializationError, msg, err) + + @staticmethod + def deserialize_long(attr): + """Deserialize string into long (Py2) or int (Py3). + + :param str attr: response string to be deserialized. + :rtype: long or int + :raises: ValueError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + return _long_type(attr) # type: ignore + + @staticmethod + def deserialize_duration(attr): + """Deserialize ISO-8601 formatted string into TimeDelta object. + + :param str attr: response string to be deserialized. + :rtype: TimeDelta + :raises: DeserializationError if string format invalid. 
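Enum deserialization is deliberately lenient: integer indexes and case-insensitive value matches resolve to members, while an unknown value is logged and handed back as a plain string rather than failing the whole response; deserialize_base64 restores the padding that serialize_base64 stripped. A sketch with a hypothetical enum:

    from enum import Enum

    from azure.ai.ml._restclient.v2023_08_01_preview._serialization import Deserializer  # assumed path

    class ProvisioningState(Enum):  # hypothetical, for illustration only
        SUCCEEDED = "Succeeded"
        FAILED = "Failed"

    print(Deserializer.deserialize_enum("succeeded", ProvisioningState))  # ProvisioningState.SUCCEEDED
    print(Deserializer.deserialize_enum("Canceled", ProvisioningState))   # Canceled  (warning logged, kept as str)
    print(Deserializer.deserialize_base64("aGVsbG8gd29ybGQ"))             # b'hello world'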
+ """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + duration = isodate.parse_duration(attr) + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize duration object." + raise_with_traceback(DeserializationError, msg, err) + else: + return duration + + @staticmethod + def deserialize_date(attr): + """Deserialize ISO-8601 formatted string into Date object. + + :param str attr: response string to be deserialized. + :rtype: Date + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + # This must NOT use defaultmonth/defaultday. Using None ensure this raises an exception. + return isodate.parse_date(attr, defaultmonth=None, defaultday=None) + + @staticmethod + def deserialize_time(attr): + """Deserialize ISO-8601 formatted string into time object. + + :param str attr: response string to be deserialized. + :rtype: datetime.time + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + if re.search(r"[^\W\d_]", attr, re.I + re.U): # type: ignore + raise DeserializationError("Date must have only digits and -. Received: %s" % attr) + return isodate.parse_time(attr) + + @staticmethod + def deserialize_rfc(attr): + """Deserialize RFC-1123 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + parsed_date = email.utils.parsedate_tz(attr) # type: ignore + date_obj = datetime.datetime( + *parsed_date[:6], tzinfo=_FixedOffset(datetime.timedelta(minutes=(parsed_date[9] or 0) / 60)) + ) + if not date_obj.tzinfo: + date_obj = date_obj.astimezone(tz=TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to rfc datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj + + @staticmethod + def deserialize_iso(attr): + """Deserialize ISO-8601 formatted string into Datetime object. + + :param str attr: response string to be deserialized. + :rtype: Datetime + :raises: DeserializationError if string format invalid. + """ + if isinstance(attr, ET.Element): + attr = attr.text + try: + attr = attr.upper() # type: ignore + match = Deserializer.valid_date.match(attr) + if not match: + raise ValueError("Invalid datetime string: " + attr) + + check_decimal = attr.split(".") + if len(check_decimal) > 1: + decimal_str = "" + for digit in check_decimal[1]: + if digit.isdigit(): + decimal_str += digit + else: + break + if len(decimal_str) > 6: + attr = attr.replace(decimal_str, decimal_str[0:6]) + + date_obj = isodate.parse_datetime(attr) + test_utc = date_obj.utctimetuple() + if test_utc.tm_year > 9999 or test_utc.tm_year < 1: + raise OverflowError("Hit max or min date") + except (ValueError, OverflowError, AttributeError) as err: + msg = "Cannot deserialize datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj + + @staticmethod + def deserialize_unix(attr): + """Serialize Datetime object into IntTime format. + This is represented as seconds. + + :param int attr: Object to be serialized. 
+ :rtype: Datetime + :raises: DeserializationError if format invalid + """ + if isinstance(attr, ET.Element): + attr = int(attr.text) # type: ignore + try: + date_obj = datetime.datetime.fromtimestamp(attr, TZ_UTC) + except ValueError as err: + msg = "Cannot deserialize to unix datetime object." + raise_with_traceback(DeserializationError, msg, err) + else: + return date_obj diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_vendor.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_vendor.py index 138f663c53a4..0dafe0e287ff 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_vendor.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/_vendor.py @@ -7,21 +7,10 @@ from azure.core.pipeline.transport import HttpRequest + def _convert_request(request, files=None): data = request.content if not files else None request = HttpRequest(method=request.method, url=request.url, headers=request.headers, data=data) if files: request.set_formdata_body(files) return request - -def _format_url_section(template, **kwargs): - components = template.split("/") - while components: - try: - return template.format(**kwargs) - except KeyError as key: - formatted_components = template.split("/") - components = [ - c for c in formatted_components if "{}".format(key.args[0]) not in c - ] - template = "/".join(components) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/__init__.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/__init__.py index f67ccda966f1..406c96a49744 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/__init__.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/__init__.py @@ -6,10 +6,18 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from ._azure_machine_learning_workspaces import AzureMachineLearningWorkspaces -__all__ = ['AzureMachineLearningWorkspaces'] +from ._azure_machine_learning_services import AzureMachineLearningServices -# `._patch.py` is used for handwritten extensions to the generated code -# Example: https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md -from ._patch import patch_sdk -patch_sdk() +try: + from ._patch import * # pylint: disable=unused-wildcard-import + from ._patch import __all__ as _patch_all +except ImportError: + _patch_all = [] +from ._patch import patch_sdk as _patch_sdk + +__all__ = [ + "AzureMachineLearningServices", +] +__all__.extend([p for p in _patch_all if p not in __all__]) + +_patch_sdk() diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/_azure_machine_learning_workspaces.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/_azure_machine_learning_services.py similarity index 71% rename from sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/_azure_machine_learning_workspaces.py rename to sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/_azure_machine_learning_services.py index 2ae7761d8c9c..aea3d39b2086 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/_azure_machine_learning_workspaces.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/_azure_machine_learning_services.py @@ -7,22 +7,70 @@ # -------------------------------------------------------------------------- from copy import deepcopy -from typing import Any, Awaitable, TYPE_CHECKING - -from msrest import Deserializer, Serializer +from typing import TYPE_CHECKING, Any, Awaitable from azure.core.rest import AsyncHttpResponse, HttpRequest from azure.mgmt.core import AsyncARMPipelineClient -from .. import models -from ._configuration import AzureMachineLearningWorkspacesConfiguration -from .operations import BatchDeploymentsOperations, BatchEndpointsOperations, CodeContainersOperations, CodeVersionsOperations, ComponentContainersOperations, ComponentVersionsOperations, ComputeOperations, DataContainersOperations, DataVersionsOperations, DatastoresOperations, EnvironmentContainersOperations, EnvironmentVersionsOperations, FeaturesOperations, FeaturesetContainersOperations, FeaturesetVersionsOperations, FeaturestoreEntityContainersOperations, FeaturestoreEntityVersionsOperations, JobsOperations, LabelingJobsOperations, ManagedNetworkProvisionsOperations, ManagedNetworkSettingsRuleOperations, ModelContainersOperations, ModelVersionsOperations, OnlineDeploymentsOperations, OnlineEndpointsOperations, Operations, PrivateEndpointConnectionsOperations, PrivateLinkResourcesOperations, QuotasOperations, RegistriesOperations, RegistryCodeContainersOperations, RegistryCodeVersionsOperations, RegistryComponentContainersOperations, RegistryComponentVersionsOperations, RegistryDataContainersOperations, RegistryDataVersionsOperations, RegistryEnvironmentContainersOperations, RegistryEnvironmentVersionsOperations, RegistryModelContainersOperations, RegistryModelVersionsOperations, SchedulesOperations, ServerlessEndpointsOperations, UsagesOperations, VirtualMachineSizesOperations, WorkspaceConnectionsOperations, WorkspaceFeaturesOperations, WorkspacesOperations +from .. 
import models as _models +from .._serialization import Deserializer, Serializer +from ._configuration import AzureMachineLearningServicesConfiguration +from .operations import ( + BatchDeploymentsOperations, + BatchEndpointsOperations, + CodeContainersOperations, + CodeVersionsOperations, + ComponentContainersOperations, + ComponentVersionsOperations, + ComputeOperations, + DataContainersOperations, + DatastoresOperations, + DataVersionsOperations, + EnvironmentContainersOperations, + EnvironmentVersionsOperations, + FeaturesetContainersOperations, + FeaturesetVersionsOperations, + FeaturesOperations, + FeaturestoreEntityContainersOperations, + FeaturestoreEntityVersionsOperations, + JobsOperations, + LabelingJobsOperations, + ManagedNetworkProvisionsOperations, + ManagedNetworkSettingsRuleOperations, + ModelContainersOperations, + ModelVersionsOperations, + OnlineDeploymentsOperations, + OnlineEndpointsOperations, + Operations, + PrivateEndpointConnectionsOperations, + PrivateLinkResourcesOperations, + QuotasOperations, + RegistriesOperations, + RegistryCodeContainersOperations, + RegistryCodeVersionsOperations, + RegistryComponentContainersOperations, + RegistryComponentVersionsOperations, + RegistryDataContainersOperations, + RegistryDataVersionsOperations, + RegistryEnvironmentContainersOperations, + RegistryEnvironmentVersionsOperations, + RegistryModelContainersOperations, + RegistryModelVersionsOperations, + SchedulesOperations, + ServerlessEndpointsOperations, + UsagesOperations, + VirtualMachineSizesOperations, + WorkspaceConnectionsOperations, + WorkspaceFeaturesOperations, + WorkspacesOperations, +) if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials_async import AsyncTokenCredential -class AzureMachineLearningWorkspaces: # pylint: disable=too-many-instance-attributes + +class AzureMachineLearningServices: # pylint: disable=client-accepts-api-version-keyword,too-many-instance-attributes """These APIs allow end users to operate on Azure Machine Learning Workspace resources. :ivar usages: UsagesOperations operations @@ -156,14 +204,14 @@ class AzureMachineLearningWorkspaces: # pylint: disable=too-many-instance-att :ivar managed_network_provisions: ManagedNetworkProvisionsOperations operations :vartype managed_network_provisions: azure.mgmt.machinelearningservices.aio.operations.ManagedNetworkProvisionsOperations - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The ID of the target subscription. + :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :param base_url: Service URL. Default value is 'https://management.azure.com'. + :param base_url: Service URL. Default value is "https://management.azure.com". :type base_url: str - :keyword api_version: Api Version. The default value is "2023-08-01-preview". Note that - overriding this default value may result in unsupported behavior. + :keyword api_version: Api Version. Default value is "2023-08-01-preview". Note that overriding + this default value may result in unsupported behavior. :paramtype api_version: str :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
@@ -176,67 +224,124 @@ def __init__( base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: - self._config = AzureMachineLearningWorkspacesConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) - self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) + self._config = AzureMachineLearningServicesConfiguration( + credential=credential, subscription_id=subscription_id, **kwargs + ) + self._client: AsyncARMPipelineClient = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs) - client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} + client_models = {k: v for k, v in _models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) self._serialize.client_side_validation = False self.usages = UsagesOperations(self._client, self._config, self._serialize, self._deserialize) - self.virtual_machine_sizes = VirtualMachineSizesOperations(self._client, self._config, self._serialize, self._deserialize) + self.virtual_machine_sizes = VirtualMachineSizesOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.quotas = QuotasOperations(self._client, self._config, self._serialize, self._deserialize) self.compute = ComputeOperations(self._client, self._config, self._serialize, self._deserialize) self.registries = RegistriesOperations(self._client, self._config, self._serialize, self._deserialize) - self.workspace_features = WorkspaceFeaturesOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_code_containers = RegistryCodeContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_code_versions = RegistryCodeVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_component_containers = RegistryComponentContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_component_versions = RegistryComponentVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_data_containers = RegistryDataContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_data_versions = RegistryDataVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_environment_containers = RegistryEnvironmentContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_environment_versions = RegistryEnvironmentVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_model_containers = RegistryModelContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.registry_model_versions = RegistryModelVersionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.workspace_features = WorkspaceFeaturesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_code_containers = RegistryCodeContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_code_versions = RegistryCodeVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_component_containers = RegistryComponentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + 
self.registry_component_versions = RegistryComponentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_data_containers = RegistryDataContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_data_versions = RegistryDataVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_environment_containers = RegistryEnvironmentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_environment_versions = RegistryEnvironmentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_model_containers = RegistryModelContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.registry_model_versions = RegistryModelVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.batch_endpoints = BatchEndpointsOperations(self._client, self._config, self._serialize, self._deserialize) - self.batch_deployments = BatchDeploymentsOperations(self._client, self._config, self._serialize, self._deserialize) + self.batch_deployments = BatchDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.code_containers = CodeContainersOperations(self._client, self._config, self._serialize, self._deserialize) self.code_versions = CodeVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.component_containers = ComponentContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.component_versions = ComponentVersionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.component_containers = ComponentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.component_versions = ComponentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.data_containers = DataContainersOperations(self._client, self._config, self._serialize, self._deserialize) self.data_versions = DataVersionsOperations(self._client, self._config, self._serialize, self._deserialize) self.datastores = DatastoresOperations(self._client, self._config, self._serialize, self._deserialize) - self.environment_containers = EnvironmentContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.environment_versions = EnvironmentVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.featureset_containers = FeaturesetContainersOperations(self._client, self._config, self._serialize, self._deserialize) + self.environment_containers = EnvironmentContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.environment_versions = EnvironmentVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.featureset_containers = FeaturesetContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.features = FeaturesOperations(self._client, self._config, self._serialize, self._deserialize) - self.featureset_versions = FeaturesetVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.featurestore_entity_containers = FeaturestoreEntityContainersOperations(self._client, self._config, self._serialize, self._deserialize) - self.featurestore_entity_versions 
= FeaturestoreEntityVersionsOperations(self._client, self._config, self._serialize, self._deserialize) + self.featureset_versions = FeaturesetVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.featurestore_entity_containers = FeaturestoreEntityContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.featurestore_entity_versions = FeaturestoreEntityVersionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.jobs = JobsOperations(self._client, self._config, self._serialize, self._deserialize) self.labeling_jobs = LabelingJobsOperations(self._client, self._config, self._serialize, self._deserialize) - self.model_containers = ModelContainersOperations(self._client, self._config, self._serialize, self._deserialize) + self.model_containers = ModelContainersOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.model_versions = ModelVersionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.online_endpoints = OnlineEndpointsOperations(self._client, self._config, self._serialize, self._deserialize) - self.online_deployments = OnlineDeploymentsOperations(self._client, self._config, self._serialize, self._deserialize) + self.online_endpoints = OnlineEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.online_deployments = OnlineDeploymentsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.schedules = SchedulesOperations(self._client, self._config, self._serialize, self._deserialize) - self.serverless_endpoints = ServerlessEndpointsOperations(self._client, self._config, self._serialize, self._deserialize) + self.serverless_endpoints = ServerlessEndpointsOperations( + self._client, self._config, self._serialize, self._deserialize + ) self.operations = Operations(self._client, self._config, self._serialize, self._deserialize) self.workspaces = WorkspacesOperations(self._client, self._config, self._serialize, self._deserialize) - self.workspace_connections = WorkspaceConnectionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.managed_network_settings_rule = ManagedNetworkSettingsRuleOperations(self._client, self._config, self._serialize, self._deserialize) - self.private_endpoint_connections = PrivateEndpointConnectionsOperations(self._client, self._config, self._serialize, self._deserialize) - self.private_link_resources = PrivateLinkResourcesOperations(self._client, self._config, self._serialize, self._deserialize) - self.managed_network_provisions = ManagedNetworkProvisionsOperations(self._client, self._config, self._serialize, self._deserialize) - + self.workspace_connections = WorkspaceConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.managed_network_settings_rule = ManagedNetworkSettingsRuleOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.private_endpoint_connections = PrivateEndpointConnectionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.private_link_resources = PrivateLinkResourcesOperations( + self._client, self._config, self._serialize, self._deserialize + ) + self.managed_network_provisions = ManagedNetworkProvisionsOperations( + self._client, self._config, self._serialize, self._deserialize + ) - def _send_request( - self, - request: HttpRequest, - **kwargs: Any - ) -> 
Awaitable[AsyncHttpResponse]: + def _send_request(self, request: HttpRequest, **kwargs: Any) -> Awaitable[AsyncHttpResponse]: """Runs the network request through the client's chained policies. >>> from azure.core.rest import HttpRequest @@ -245,7 +350,7 @@ def _send_request( >>> response = await client._send_request(request) - For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart + For more information on this code flow, see https://aka.ms/azsdk/dpcodegen/python/send_request :param request: The network request you want to make. Required. :type request: ~azure.core.rest.HttpRequest @@ -261,9 +366,9 @@ def _send_request( async def close(self) -> None: await self._client.close() - async def __aenter__(self) -> "AzureMachineLearningWorkspaces": + async def __aenter__(self) -> "AzureMachineLearningServices": await self._client.__aenter__() return self - async def __aexit__(self, *exc_details) -> None: + async def __aexit__(self, *exc_details: Any) -> None: await self._client.__aexit__(*exc_details) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/_configuration.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/_configuration.py index 9bb8f30ba250..23d6c8d41318 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/_configuration.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/_configuration.py @@ -6,7 +6,7 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, TYPE_CHECKING +from typing import TYPE_CHECKING, Any from azure.core.configuration import Configuration from azure.core.pipeline import policies @@ -19,29 +19,24 @@ from azure.core.credentials_async import AsyncTokenCredential -class AzureMachineLearningWorkspacesConfiguration(Configuration): # pylint: disable=too-many-instance-attributes - """Configuration for AzureMachineLearningWorkspaces. +class AzureMachineLearningServicesConfiguration(Configuration): # pylint: disable=too-many-instance-attributes + """Configuration for AzureMachineLearningServices. Note that all parameters used to create this instance are saved as instance attributes. - :param credential: Credential needed for the client to connect to Azure. + :param credential: Credential needed for the client to connect to Azure. Required. :type credential: ~azure.core.credentials_async.AsyncTokenCredential - :param subscription_id: The ID of the target subscription. + :param subscription_id: The ID of the target subscription. Required. :type subscription_id: str - :keyword api_version: Api Version. The default value is "2023-08-01-preview". Note that - overriding this default value may result in unsupported behavior. + :keyword api_version: Api Version. Default value is "2023-08-01-preview". Note that overriding + this default value may result in unsupported behavior. 
:paramtype api_version: str """ - def __init__( - self, - credential: "AsyncTokenCredential", - subscription_id: str, - **kwargs: Any - ) -> None: - super(AzureMachineLearningWorkspacesConfiguration, self).__init__(**kwargs) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None: + super(AzureMachineLearningServicesConfiguration, self).__init__(**kwargs) + api_version: str = kwargs.pop("api_version", "2023-08-01-preview") if credential is None: raise ValueError("Parameter 'credential' must not be None.") @@ -51,22 +46,21 @@ def __init__( self.credential = credential self.subscription_id = subscription_id self.api_version = api_version - self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default']) - kwargs.setdefault('sdk_moniker', 'mgmt-machinelearningservices/{}'.format(VERSION)) + self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"]) + kwargs.setdefault("sdk_moniker", "mgmt-machinelearningservices/{}".format(VERSION)) self._configure(**kwargs) - def _configure( - self, - **kwargs: Any - ) -> None: - self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs) - self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs) - self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs) - self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs) - self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs) - self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs) - self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs) - self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs) - self.authentication_policy = kwargs.get('authentication_policy') + def _configure(self, **kwargs: Any) -> None: + self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs) + self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs) + self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs) + self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs) + self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs) + self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs) + self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs) + self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs) + self.authentication_policy = kwargs.get("authentication_policy") if self.credential and not self.authentication_policy: - self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs) + self.authentication_policy = AsyncARMChallengeAuthenticationPolicy( + self.credential, *self.credential_scopes, **kwargs + ) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/_patch.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/_patch.py index 74e48ecd07cf..f99e77fef986 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/_patch.py +++ 
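The renamed async client attaches every operation group as a plain attribute and passes the credential and subscription straight into AzureMachineLearningServicesConfiguration, which in turn builds the AsyncARMChallengeAuthenticationPolicy shown above. A minimal construction sketch in the doctest style the generated docstrings already use; it assumes azure-identity is installed, that the aio package re-exports the renamed client from its __init__, and that the placeholder subscription ID is replaced:

>>> from azure.identity.aio import DefaultAzureCredential
>>> from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices
>>> async with AzureMachineLearningServices(
...     credential=DefaultAzureCredential(),
...     subscription_id="<subscription-id>",
... ) as client:
...     ops = client.batch_deployments  # operation groups are plain attributes

Leaving the async with block calls __aexit__, which closes the underlying pipeline client, so no explicit close() call is needed.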
b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/_patch.py @@ -28,4 +28,4 @@ # This file is used for handwritten extensions to the generated code. Example: # https://github.com/Azure/azure-sdk-for-python/blob/main/doc/dev/customize_code/how-to-patch-sdk-code.md def patch_sdk(): - pass \ No newline at end of file + pass diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/__init__.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/__init__.py index 64d0c00d4f30..d37dfef71298 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/__init__.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/__init__.py @@ -6,100 +6,105 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from ._usages_operations import UsagesOperations -from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations -from ._quotas_operations import QuotasOperations -from ._compute_operations import ComputeOperations -from ._registries_operations import RegistriesOperations -from ._workspace_features_operations import WorkspaceFeaturesOperations -from ._registry_code_containers_operations import RegistryCodeContainersOperations -from ._registry_code_versions_operations import RegistryCodeVersionsOperations -from ._registry_component_containers_operations import RegistryComponentContainersOperations -from ._registry_component_versions_operations import RegistryComponentVersionsOperations -from ._registry_data_containers_operations import RegistryDataContainersOperations -from ._registry_data_versions_operations import RegistryDataVersionsOperations -from ._registry_environment_containers_operations import RegistryEnvironmentContainersOperations -from ._registry_environment_versions_operations import RegistryEnvironmentVersionsOperations -from ._registry_model_containers_operations import RegistryModelContainersOperations -from ._registry_model_versions_operations import RegistryModelVersionsOperations -from ._batch_endpoints_operations import BatchEndpointsOperations from ._batch_deployments_operations import BatchDeploymentsOperations +from ._batch_endpoints_operations import BatchEndpointsOperations from ._code_containers_operations import CodeContainersOperations from ._code_versions_operations import CodeVersionsOperations from ._component_containers_operations import ComponentContainersOperations from ._component_versions_operations import ComponentVersionsOperations +from ._compute_operations import ComputeOperations from ._data_containers_operations import DataContainersOperations from ._data_versions_operations import DataVersionsOperations from ._datastores_operations import DatastoresOperations from ._environment_containers_operations import EnvironmentContainersOperations from ._environment_versions_operations import EnvironmentVersionsOperations -from ._featureset_containers_operations import FeaturesetContainersOperations from ._features_operations import FeaturesOperations +from ._featureset_containers_operations import FeaturesetContainersOperations from ._featureset_versions_operations import FeaturesetVersionsOperations from ._featurestore_entity_containers_operations import FeaturestoreEntityContainersOperations from ._featurestore_entity_versions_operations import FeaturestoreEntityVersionsOperations from 
._jobs_operations import JobsOperations from ._labeling_jobs_operations import LabelingJobsOperations +from ._managed_network_provisions_operations import ManagedNetworkProvisionsOperations +from ._managed_network_settings_rule_operations import ManagedNetworkSettingsRuleOperations from ._model_containers_operations import ModelContainersOperations from ._model_versions_operations import ModelVersionsOperations -from ._online_endpoints_operations import OnlineEndpointsOperations from ._online_deployments_operations import OnlineDeploymentsOperations -from ._schedules_operations import SchedulesOperations -from ._serverless_endpoints_operations import ServerlessEndpointsOperations +from ._online_endpoints_operations import OnlineEndpointsOperations from ._operations import Operations -from ._workspaces_operations import WorkspacesOperations -from ._workspace_connections_operations import WorkspaceConnectionsOperations -from ._managed_network_settings_rule_operations import ManagedNetworkSettingsRuleOperations +from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import __all__ as _patch_all +from ._patch import patch_sdk as _patch_sdk from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations from ._private_link_resources_operations import PrivateLinkResourcesOperations -from ._managed_network_provisions_operations import ManagedNetworkProvisionsOperations +from ._quotas_operations import QuotasOperations +from ._registries_operations import RegistriesOperations +from ._registry_code_containers_operations import RegistryCodeContainersOperations +from ._registry_code_versions_operations import RegistryCodeVersionsOperations +from ._registry_component_containers_operations import RegistryComponentContainersOperations +from ._registry_component_versions_operations import RegistryComponentVersionsOperations +from ._registry_data_containers_operations import RegistryDataContainersOperations +from ._registry_data_versions_operations import RegistryDataVersionsOperations +from ._registry_environment_containers_operations import RegistryEnvironmentContainersOperations +from ._registry_environment_versions_operations import RegistryEnvironmentVersionsOperations +from ._registry_model_containers_operations import RegistryModelContainersOperations +from ._registry_model_versions_operations import RegistryModelVersionsOperations +from ._schedules_operations import SchedulesOperations +from ._serverless_endpoints_operations import ServerlessEndpointsOperations +from ._usages_operations import UsagesOperations +from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations +from ._workspace_connections_operations import WorkspaceConnectionsOperations +from ._workspace_features_operations import WorkspaceFeaturesOperations +from ._workspaces_operations import WorkspacesOperations __all__ = [ - 'UsagesOperations', - 'VirtualMachineSizesOperations', - 'QuotasOperations', - 'ComputeOperations', - 'RegistriesOperations', - 'WorkspaceFeaturesOperations', - 'RegistryCodeContainersOperations', - 'RegistryCodeVersionsOperations', - 'RegistryComponentContainersOperations', - 'RegistryComponentVersionsOperations', - 'RegistryDataContainersOperations', - 'RegistryDataVersionsOperations', - 'RegistryEnvironmentContainersOperations', - 'RegistryEnvironmentVersionsOperations', - 'RegistryModelContainersOperations', - 'RegistryModelVersionsOperations', - 'BatchEndpointsOperations', - 'BatchDeploymentsOperations', - 'CodeContainersOperations', - 
'CodeVersionsOperations', - 'ComponentContainersOperations', - 'ComponentVersionsOperations', - 'DataContainersOperations', - 'DataVersionsOperations', - 'DatastoresOperations', - 'EnvironmentContainersOperations', - 'EnvironmentVersionsOperations', - 'FeaturesetContainersOperations', - 'FeaturesOperations', - 'FeaturesetVersionsOperations', - 'FeaturestoreEntityContainersOperations', - 'FeaturestoreEntityVersionsOperations', - 'JobsOperations', - 'LabelingJobsOperations', - 'ModelContainersOperations', - 'ModelVersionsOperations', - 'OnlineEndpointsOperations', - 'OnlineDeploymentsOperations', - 'SchedulesOperations', - 'ServerlessEndpointsOperations', - 'Operations', - 'WorkspacesOperations', - 'WorkspaceConnectionsOperations', - 'ManagedNetworkSettingsRuleOperations', - 'PrivateEndpointConnectionsOperations', - 'PrivateLinkResourcesOperations', - 'ManagedNetworkProvisionsOperations', + "UsagesOperations", + "VirtualMachineSizesOperations", + "QuotasOperations", + "ComputeOperations", + "RegistriesOperations", + "WorkspaceFeaturesOperations", + "RegistryCodeContainersOperations", + "RegistryCodeVersionsOperations", + "RegistryComponentContainersOperations", + "RegistryComponentVersionsOperations", + "RegistryDataContainersOperations", + "RegistryDataVersionsOperations", + "RegistryEnvironmentContainersOperations", + "RegistryEnvironmentVersionsOperations", + "RegistryModelContainersOperations", + "RegistryModelVersionsOperations", + "BatchEndpointsOperations", + "BatchDeploymentsOperations", + "CodeContainersOperations", + "CodeVersionsOperations", + "ComponentContainersOperations", + "ComponentVersionsOperations", + "DataContainersOperations", + "DataVersionsOperations", + "DatastoresOperations", + "EnvironmentContainersOperations", + "EnvironmentVersionsOperations", + "FeaturesetContainersOperations", + "FeaturesOperations", + "FeaturesetVersionsOperations", + "FeaturestoreEntityContainersOperations", + "FeaturestoreEntityVersionsOperations", + "JobsOperations", + "LabelingJobsOperations", + "ModelContainersOperations", + "ModelVersionsOperations", + "OnlineEndpointsOperations", + "OnlineDeploymentsOperations", + "SchedulesOperations", + "ServerlessEndpointsOperations", + "Operations", + "WorkspacesOperations", + "WorkspaceConnectionsOperations", + "ManagedNetworkSettingsRuleOperations", + "PrivateEndpointConnectionsOperations", + "PrivateLinkResourcesOperations", + "ManagedNetworkProvisionsOperations", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_batch_deployments_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_batch_deployments_operations.py index 2a2a82cfbe6d..f6d89d4ad4ab 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_batch_deployments_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_batch_deployments_operations.py @@ -6,46 +6,61 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
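The reordered aio/operations/__init__.py now wildcard-imports _patch, extends __all__ with whatever _patch.__all__ exports, and runs patch_sdk() at import time, so a handwritten customization module only needs to define those two names. The sketch below is a hypothetical example of such a customization; ExtendedBatchDeploymentsOperations and its exists() helper are invented for illustration and are not part of this patch:

# _patch.py -- hypothetical handwritten extension, not generated code
from typing import List

from azure.core.exceptions import ResourceNotFoundError

from ._batch_deployments_operations import BatchDeploymentsOperations


class ExtendedBatchDeploymentsOperations(BatchDeploymentsOperations):
    """Hypothetical subclass adding a convenience lookup on top of the generated get()."""

    async def exists(
        self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str
    ) -> bool:
        # get() maps a 404 response to ResourceNotFoundError via its error_map.
        try:
            await self.get(resource_group_name, workspace_name, endpoint_name, deployment_name)
            return True
        except ResourceNotFoundError:
            return False


__all__: List[str] = ["ExtendedBatchDeploymentsOperations"]  # re-exported by operations/__init__.py


def patch_sdk() -> None:
    """Hook invoked by operations/__init__.py on import; no runtime fix-ups needed in this sketch."""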
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._batch_deployments_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request, build_update_request_initial -T = TypeVar('T') +from ...operations._batch_deployments_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, + build_update_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class BatchDeploymentsOperations: - """BatchDeploymentsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class BatchDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`batch_deployments` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -57,66 +72,75 @@ def list( top: Optional[int] = None, skip: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.BatchDeploymentTrackedResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.BatchDeployment"]: """Lists Batch inference deployments in the workspace. Lists Batch inference deployments in the workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Endpoint name. + :param endpoint_name: Endpoint name. Required. :type endpoint_name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Top of list. + :param top: Top of list. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either BatchDeploymentTrackedResourceArmPaginatedResult - or the result of cls(response) + :return: An iterator like instance of either BatchDeployment or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.BatchDeploymentTrackedResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.BatchDeploymentTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchDeploymentTrackedResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, 
top=top, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - endpoint_name=endpoint_name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -127,16 +151,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("BatchDeploymentTrackedResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -147,85 +170,86 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - deployment_name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - deployment_name: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete Batch Inference deployment (asynchronous). Delete Batch Inference deployment (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Endpoint name. + :param endpoint_name: Endpoint name. Required. :type endpoint_name: str - :param deployment_name: Inference deployment identifier. 
+ :param deployment_name: Inference deployment identifier. Required. :type deployment_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -237,99 +261,110 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + begin_delete.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - deployment_name: str, - **kwargs: Any - ) -> "_models.BatchDeployment": + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> _models.BatchDeployment: """Gets a batch inference deployment by id. Gets a batch inference deployment by id. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Endpoint name. + :param endpoint_name: Endpoint name. Required. :type endpoint_name: str - :param deployment_name: The identifier for the Batch deployments. + :param deployment_name: The identifier for the Batch deployments. Required. :type deployment_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: BatchDeployment, or the result of cls(response) + :return: BatchDeployment or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.BatchDeployment - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchDeployment"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.BatchDeployment] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -337,15 +372,16 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('BatchDeployment', pipeline_response) + deserialized = 
self._deserialize("BatchDeployment", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}" + } async def _update_initial( self, @@ -353,63 +389,172 @@ async def _update_initial( workspace_name: str, endpoint_name: str, deployment_name: str, - body: "_models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties", + body: Union[_models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, IO], **kwargs: Any - ) -> Optional["_models.BatchDeployment"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.BatchDeployment"]] + ) -> Optional[_models.BatchDeployment]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.BatchDeployment]] = kwargs.pop("cls", None) - request = build_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._update_initial.metadata['url'], + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('BatchDeployment', pipeline_response) + deserialized = self._deserialize("BatchDeployment", pipeline_response) if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}" + } + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchDeployment]: + """Update a batch inference deployment (asynchronous). + + Update a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
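_update_initial now accepts either the partial-properties model or a raw IO/bytes payload: a model is serialized to JSON, while anything file-like is passed through as the request content, and the @overload declarations added around this point expose both call shapes on begin_update(). A hedged sketch of the pass-through form; the JSON shown is only an illustrative PATCH payload, not a schema taken from this patch:

>>> from io import BytesIO
>>> payload = BytesIO(b'{"tags": {"stage": "test"}}')  # sent as-is with the given content type
>>> poller = await client.batch_deployments.begin_update(
...     resource_group_name="<resource-group>",
...     workspace_name="<workspace>",
...     endpoint_name="<endpoint>",
...     deployment_name="<deployment>",
...     body=payload,
...     content_type="application/json",
... )
>>> updated = await poller.result()  # resolves to the deserialized BatchDeployment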
+ :return: An instance of AsyncLROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchDeployment]: + """Update a batch inference deployment (asynchronous). + + Update a batch inference deployment (asynchronous). + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_update( @@ -418,24 +563,30 @@ async def begin_update( workspace_name: str, endpoint_name: str, deployment_name: str, - body: "_models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties", + body: Union[_models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.BatchDeployment"]: + ) -> AsyncLROPoller[_models.BatchDeployment]: """Update a batch inference deployment (asynchronous). Update a batch inference deployment (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param deployment_name: The identifier for the Batch inference deployment. + :param deployment_name: The identifier for the Batch inference deployment. Required. :type deployment_name: str - :param body: Batch inference deployment definition object. + :param body: Batch inference deployment definition object. 
Is either a + PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties type or a IO type. Required. :type body: ~azure.mgmt.machinelearningservices.models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -448,17 +599,17 @@ async def begin_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchDeployment"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BatchDeployment] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._update_initial( resource_group_name=resource_group_name, @@ -468,32 +619,37 @@ async def begin_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('BatchDeployment', pipeline_response) + deserialized = self._deserialize("BatchDeployment", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, 
polling_method) # type: ignore - begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}" + } async def _create_or_update_initial( self, @@ -501,62 +657,173 @@ async def _create_or_update_initial( workspace_name: str, endpoint_name: str, deployment_name: str, - body: "_models.BatchDeployment", + body: Union[_models.BatchDeployment, IO], **kwargs: Any - ) -> "_models.BatchDeployment": - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchDeployment"] + ) -> _models.BatchDeployment: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'BatchDeployment') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BatchDeployment] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "BatchDeployment") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = 
self._deserialize('BatchDeployment', pipeline_response) + deserialized = self._deserialize("BatchDeployment", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('BatchDeployment', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("BatchDeployment", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.BatchDeployment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchDeployment]: + """Creates/updates a batch inference deployment (asynchronous). + + Creates/updates a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchDeployment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchDeployment]: + """Creates/updates a batch inference deployment (asynchronous). + Creates/updates a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -565,23 +832,28 @@ async def begin_create_or_update( workspace_name: str, endpoint_name: str, deployment_name: str, - body: "_models.BatchDeployment", + body: Union[_models.BatchDeployment, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.BatchDeployment"]: + ) -> AsyncLROPoller[_models.BatchDeployment]: """Creates/updates a batch inference deployment (asynchronous). Creates/updates a batch inference deployment (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param deployment_name: The identifier for the Batch inference deployment. 
+ :param deployment_name: The identifier for the Batch inference deployment. Required. :type deployment_name: str - :param body: Batch inference deployment definition object. - :type body: ~azure.mgmt.machinelearningservices.models.BatchDeployment + :param body: Batch inference deployment definition object. Is either a BatchDeployment type or + a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchDeployment or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -594,17 +866,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchDeployment"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BatchDeployment] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -614,29 +886,37 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('BatchDeployment', pipeline_response) + deserialized = self._deserialize("BatchDeployment", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( 
polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_batch_endpoints_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_batch_endpoints_operations.py index c8214b7dab81..2ad341e606dd 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_batch_endpoints_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_batch_endpoints_operations.py @@ -6,46 +6,62 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._batch_endpoints_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_keys_request, build_list_request, build_update_request_initial -T = TypeVar('T') +from ...operations._batch_endpoints_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_keys_request, + build_list_request, + build_update_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class BatchEndpointsOperations: - """BatchEndpointsOperations async operations. - You should not instantiate this class directly. 
Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class BatchEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`batch_endpoints` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -55,58 +71,69 @@ def list( count: Optional[int] = None, skip: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.BatchEndpointTrackedResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.BatchEndpoint"]: """Lists Batch inference endpoint in the workspace. Lists Batch inference endpoint in the workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param count: Number of endpoints to be retrieved in a page of results. + :param count: Number of endpoints to be retrieved in a page of results. Default value is None. :type count: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. 
:type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either BatchEndpointTrackedResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either BatchEndpoint or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.BatchEndpointTrackedResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.BatchEndpointTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchEndpointTrackedResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, count=count, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - count=count, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -117,16 +144,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("BatchEndpointTrackedResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, 
stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -137,80 +163,83 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = 
self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete Batch Inference Endpoint (asynchronous). Delete Batch Inference Endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference Endpoint name. + :param endpoint_name: Inference Endpoint name. Required. :type endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -222,94 +251,106 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, 
lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - **kwargs: Any - ) -> "_models.BatchEndpoint": + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.BatchEndpoint: """Gets a batch inference endpoint by name. Gets a batch inference endpoint by name. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Name for the Batch Endpoint. + :param endpoint_name: Name for the Batch Endpoint. Required. 
:type endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: BatchEndpoint, or the result of cls(response) + :return: BatchEndpoint or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.BatchEndpoint - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchEndpoint"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.BatchEndpoint] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -317,77 +358,181 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('BatchEndpoint', pipeline_response) + deserialized = self._deserialize("BatchEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}" + } async def _update_initial( self, resource_group_name: str, workspace_name: str, endpoint_name: str, - body: "_models.PartialMinimalTrackedResourceWithIdentity", + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], **kwargs: Any - ) -> Optional["_models.BatchEndpoint"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.BatchEndpoint"]] + ) -> Optional[_models.BatchEndpoint]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + 
error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PartialMinimalTrackedResourceWithIdentity') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.BatchEndpoint]] = kwargs.pop("cls", None) - request = build_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithIdentity") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._update_initial.metadata['url'], + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('BatchEndpoint', pipeline_response) + deserialized = self._deserialize("BatchEndpoint", pipeline_response) if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + _update_initial.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}" + } + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchEndpoint]: + """Update a batch inference endpoint (asynchronous). + + Update a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Mutable batch inference endpoint definition object. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchEndpoint]: + """Update a batch inference endpoint (asynchronous). + + Update a batch inference endpoint (asynchronous). + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Mutable batch inference endpoint definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_update( @@ -395,22 +540,27 @@ async def begin_update( resource_group_name: str, workspace_name: str, endpoint_name: str, - body: "_models.PartialMinimalTrackedResourceWithIdentity", + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.BatchEndpoint"]: + ) -> AsyncLROPoller[_models.BatchEndpoint]: """Update a batch inference endpoint (asynchronous). Update a batch inference endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Name for the Batch inference endpoint. + :param endpoint_name: Name for the Batch inference endpoint. Required. :type endpoint_name: str - :param body: Mutable batch inference endpoint definition object. + :param body: Mutable batch inference endpoint definition object. Is either a + PartialMinimalTrackedResourceWithIdentity type or a IO type. Required. :type body: - ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -423,17 +573,17 @@ async def begin_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchEndpoint"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BatchEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._update_initial( resource_group_name=resource_group_name, @@ -442,93 +592,203 @@ async def begin_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('BatchEndpoint', pipeline_response) + deserialized = self._deserialize("BatchEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}" + } async def _create_or_update_initial( self, resource_group_name: str, workspace_name: str, endpoint_name: str, - body: "_models.BatchEndpoint", + body: Union[_models.BatchEndpoint, IO], **kwargs: 
Any - ) -> "_models.BatchEndpoint": - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchEndpoint"] + ) -> _models.BatchEndpoint: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'BatchEndpoint') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BatchEndpoint] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "BatchEndpoint") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('BatchEndpoint', pipeline_response) + deserialized = self._deserialize("BatchEndpoint", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('BatchEndpoint', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("BatchEndpoint", pipeline_response) if cls: - return 
cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}" + } + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.BatchEndpoint, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchEndpoint]: + """Creates a batch inference endpoint (asynchronous). + + Creates a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Batch inference endpoint definition object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchEndpoint + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.BatchEndpoint]: + """Creates a batch inference endpoint (asynchronous). + + Creates a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Batch inference endpoint definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -536,21 +796,26 @@ async def begin_create_or_update( resource_group_name: str, workspace_name: str, endpoint_name: str, - body: "_models.BatchEndpoint", + body: Union[_models.BatchEndpoint, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.BatchEndpoint"]: + ) -> AsyncLROPoller[_models.BatchEndpoint]: """Creates a batch inference endpoint (asynchronous). Creates a batch inference endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Name for the Batch inference endpoint. + :param endpoint_name: Name for the Batch inference endpoint. Required. :type endpoint_name: str - :param body: Batch inference endpoint definition object. - :type body: ~azure.mgmt.machinelearningservices.models.BatchEndpoint + :param body: Batch inference endpoint definition object. Is either a BatchEndpoint type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchEndpoint or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -563,17 +828,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchEndpoint"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BatchEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -582,81 +847,93 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('BatchEndpoint', pipeline_response) + deserialized = self._deserialize("BatchEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}" + } @distributed_trace_async async def 
list_keys( - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - **kwargs: Any - ) -> "_models.EndpointAuthKeys": + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.EndpointAuthKeys: """Lists batch Inference Endpoint keys. Lists batch Inference Endpoint keys. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference Endpoint name. + :param endpoint_name: Inference Endpoint name. Required. :type endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EndpointAuthKeys, or the result of cls(response) + :return: EndpointAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EndpointAuthKeys"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EndpointAuthKeys] = kwargs.pop("cls", None) - request = build_list_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_keys.metadata['url'], + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -664,12 +941,13 @@ async def list_keys( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EndpointAuthKeys', pipeline_response) + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys"} # type: ignore - + list_keys.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_code_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_code_containers_operations.py index 50d790f76259..ff4eca6860a4 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_code_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_code_containers_operations.py @@ -6,100 +6,122 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request -from ...operations._code_containers_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._code_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class CodeContainersOperations: - """CodeContainersOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class CodeContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`code_containers` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name: str, - workspace_name: str, - skip: Optional[str] = None, - **kwargs: Any - ) -> AsyncIterable["_models.CodeContainerResourceArmPaginatedResult"]: + self, resource_group_name: str, workspace_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.CodeContainer"]: """List containers. List containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either CodeContainerResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either CodeContainer or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - 
workspace_name=workspace_name, - api_version=api_version, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -110,16 +132,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("CodeContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -130,60 +151,64 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any ) -> None: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. 
:type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -194,57 +219,62 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any - ) -> "_models.CodeContainer": + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.CodeContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. 
:type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: CodeContainer, or the result of cls(response) + :return: CodeContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -252,15 +282,82 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('CodeContainer', pipeline_response) + deserialized = self._deserialize("CodeContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}" + } + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.CodeContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. 
+ :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def create_or_update( @@ -268,55 +365,75 @@ async def create_or_update( resource_group_name: str, workspace_name: str, name: str, - body: "_models.CodeContainer", + body: Union[_models.CodeContainer, IO], **kwargs: Any - ) -> "_models.CodeContainer": + ) -> _models.CodeContainer: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer + :param body: Container entity to create or update. Is either a CodeContainer type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: CodeContainer, or the result of cls(response) + :return: CodeContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'CodeContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CodeContainer] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeContainer") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -325,15 +442,16 @@ async def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('CodeContainer', pipeline_response) + deserialized = self._deserialize("CodeContainer", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('CodeContainer', pipeline_response) + deserialized = self._deserialize("CodeContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_code_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_code_versions_operations.py index 04e125b60ec4..886e79a6b83a 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_code_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_code_versions_operations.py @@ -6,44 +6,59 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request -from ...operations._code_versions_operations import build_create_or_get_start_pending_upload_request, build_create_or_update_request, build_delete_request, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._code_versions_operations import ( + build_create_or_get_start_pending_upload_request, + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class CodeVersionsOperations: - """CodeVersionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class CodeVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`code_versions` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -57,75 +72,82 @@ def list( hash: Optional[str] = None, hash_version: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.CodeVersionResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.CodeVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param hash: If specified, return CodeVersion assets with specified content hash value, - regardless of name. + regardless of name. Default value is None. :type hash: str - :param hash_version: Hash algorithm version when listing by hash. + :param hash_version: Hash algorithm version when listing by hash. Default value is None. 
:type hash_version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either CodeVersionResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either CodeVersion or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, hash=hash, hash_version=hash_version, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - name=name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - hash=hash, - hash_version=hash_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -136,16 +158,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("CodeVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -156,64 +177,67 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any ) -> None: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 
204]: @@ -224,61 +248,65 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any - ) -> "_models.CodeVersion": + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.CodeVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: CodeVersion, or the result of cls(response) + :return: CodeVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -286,15 +314,88 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, 
pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('CodeVersion', pipeline_response) + deserialized = self._deserialize("CodeVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}" + } + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.CodeVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeVersion: + """Create or update version. + + Create or update version. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def create_or_update( @@ -303,58 +404,78 @@ async def create_or_update( workspace_name: str, name: str, version: str, - body: "_models.CodeVersion", + body: Union[_models.CodeVersion, IO], **kwargs: Any - ) -> "_models.CodeVersion": + ) -> _models.CodeVersion: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion + :param body: Version entity to create or update. Is either a CodeVersion type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: CodeVersion, or the result of cls(response) + :return: CodeVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CodeVersion] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'CodeVersion') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeVersion") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, 
content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -363,18 +484,91 @@ async def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('CodeVersion', pipeline_response) + deserialized = self._deserialize("CodeVersion", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('CodeVersion', pipeline_response) + deserialized = self._deserialize("CodeVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized + return deserialized # type: ignore + + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}" + } + + @overload + async def create_or_get_start_pending_upload( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.PendingUploadRequestDto, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a code asset to. + + Generate a storage location and credential for the client to upload a code asset to. - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_get_start_pending_upload( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a code asset to. + Generate a storage location and credential for the client to upload a code asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def create_or_get_start_pending_upload( @@ -383,58 +577,78 @@ async def create_or_get_start_pending_upload( workspace_name: str, name: str, version: str, - body: "_models.PendingUploadRequestDto", + body: Union[_models.PendingUploadRequestDto, IO], **kwargs: Any - ) -> "_models.PendingUploadResponseDto": + ) -> _models.PendingUploadResponseDto: """Generate a storage location and credential for the client to upload a code asset to. Generate a storage location and credential for the client to upload a code asset to. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Pending upload request object. - :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto + :param body: Pending upload request object. Is either a PendingUploadRequestDto type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PendingUploadResponseDto, or the result of cls(response) + :return: PendingUploadResponseDto or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PendingUploadResponseDto"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PendingUploadResponseDto] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'PendingUploadRequestDto') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PendingUploadRequestDto") request = build_create_or_get_start_pending_upload_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_get_start_pending_upload.metadata['url'], + content=_content, + template_url=self.create_or_get_start_pending_upload.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -442,12 +656,13 @@ async def create_or_get_start_pending_upload( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PendingUploadResponseDto', pipeline_response) + deserialized = self._deserialize("PendingUploadResponseDto", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_get_start_pending_upload.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}/startPendingUpload"} # type: ignore - + create_or_get_start_pending_upload.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}/startPendingUpload" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_component_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_component_containers_operations.py index 70e2ded30ad2..0b213bde4332 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_component_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_component_containers_operations.py @@ -6,44 +6,58 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request -from ...operations._component_containers_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._component_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ComponentContainersOperations: - """ComponentContainersOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class ComponentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`component_containers` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -51,60 +65,72 @@ def list( resource_group_name: str, workspace_name: str, skip: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, **kwargs: Any - ) -> AsyncIterable["_models.ComponentContainerResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.ComponentContainer"]: """List component containers. List component containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ComponentContainerResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either ComponentContainer or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComponentContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -115,16 +141,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ComponentContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + 
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -135,60 +160,64 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any ) -> None: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -199,57 +228,62 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any - ) -> "_models.ComponentContainer": + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.ComponentContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComponentContainer, or the result of cls(response) + :return: ComponentContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -257,15 +291,82 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ComponentContainer', pipeline_response) + deserialized = self._deserialize("ComponentContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}" + } + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ComponentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentContainer: + """Create or update container. + + Create or update container. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def create_or_update( @@ -273,55 +374,75 @@ async def create_or_update( resource_group_name: str, workspace_name: str, name: str, - body: "_models.ComponentContainer", + body: Union[_models.ComponentContainer, IO], **kwargs: Any - ) -> "_models.ComponentContainer": + ) -> _models.ComponentContainer: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param body: Container entity to create or update. 
- :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :param body: Container entity to create or update. Is either a ComponentContainer type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComponentContainer, or the result of cls(response) + :return: ComponentContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ComponentContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComponentContainer] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentContainer") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -330,15 +451,16 @@ async def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('ComponentContainer', pipeline_response) + deserialized = self._deserialize("ComponentContainer", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('ComponentContainer', pipeline_response) + deserialized = self._deserialize("ComponentContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return 
deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_component_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_component_versions_operations.py index 69413b02faad..fe89eda86dc3 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_component_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_component_versions_operations.py @@ -6,44 +6,58 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request -from ...operations._component_versions_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._component_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ComponentVersionsOperations: - """ComponentVersionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class ComponentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
+ Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`component_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -54,77 +68,85 @@ def list( order_by: Optional[str] = None, top: Optional[int] = None, skip: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, stage: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.ComponentVersionResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.ComponentVersion"]: """List component versions. List component versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Component name. + :param name: Component name. Required. :type name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param stage: Component stage. + :param stage: Component stage. Default value is None. 
:type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ComponentVersionResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either ComponentVersion or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComponentVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, list_view_type=list_view_type, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - name=name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - list_view_type=list_view_type, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -135,16 +157,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ComponentVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + 
_stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -155,64 +176,67 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any ) -> None: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -223,61 +247,65 @@ async def delete( # 
pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any - ) -> "_models.ComponentVersion": + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.ComponentVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComponentVersion, or the result of cls(response) + :return: ComponentVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -285,15 +313,88 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) - deserialized = self._deserialize('ComponentVersion', pipeline_response) + deserialized = self._deserialize("ComponentVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}" + } + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.ComponentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentVersion: + """Create or update version. + + Create or update version. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def create_or_update( @@ -302,58 +403,78 @@ async def create_or_update( workspace_name: str, name: str, version: str, - body: "_models.ComponentVersion", + body: Union[_models.ComponentVersion, IO], **kwargs: Any - ) -> "_models.ComponentVersion": + ) -> _models.ComponentVersion: """Create or update version. Create or update version. 
:param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :param body: Version entity to create or update. Is either a ComponentVersion type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComponentVersion, or the result of cls(response) + :return: ComponentVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComponentVersion] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'ComponentVersion') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentVersion") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -362,15 +483,16 @@ async def 
create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('ComponentVersion', pipeline_response) + deserialized = self._deserialize("ComponentVersion", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('ComponentVersion', pipeline_response) + deserialized = self._deserialize("ComponentVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_compute_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_compute_operations.py index 7ef45eab5a6d..51e2028e2f03 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_compute_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_compute_operations.py @@ -6,100 +6,132 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models from ..._vendor import _convert_request -from ...operations._compute_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_allowed_resize_sizes_request, build_get_request, build_list_keys_request, build_list_nodes_request, build_list_request, build_resize_request_initial, build_restart_request_initial, build_start_request_initial, build_stop_request_initial, build_update_custom_services_request, build_update_idle_shutdown_setting_request, build_update_request_initial -T = TypeVar('T') +from ...operations._compute_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_allowed_resize_sizes_request, + build_get_request, + build_list_keys_request, + build_list_nodes_request, + build_list_request, + build_resize_request, + build_restart_request, + build_start_request, + build_stop_request, + build_update_custom_services_request, + build_update_idle_shutdown_setting_request, + build_update_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ComputeOperations: # pylint: disable=too-many-public-methods - """ComputeOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class ComputeOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`compute` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name: str, - workspace_name: str, - skip: Optional[str] = None, - **kwargs: Any - ) -> AsyncIterable["_models.PaginatedComputeResourcesList"]: + self, resource_group_name: str, workspace_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.ComputeResource"]: """Gets computes in specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. 
:type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PaginatedComputeResourcesList or the result of - cls(response) + :return: An iterator like instance of either ComputeResource or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedComputeResourcesList] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PaginatedComputeResourcesList] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.PaginatedComputeResourcesList"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -110,16 +142,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("PaginatedComputeResourcesList", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -130,59 +161,63 @@ async def 
get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs: Any - ) -> "_models.ComputeResource": + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> _models.ComputeResource: """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are not returned - use 'keys' nested resource to get them. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComputeResource, or the result of cls(response) + :return: ComputeResource or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComputeResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComputeResource] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -190,75 +225,179 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ComputeResource', pipeline_response) + deserialized = self._deserialize("ComputeResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" + } async def _create_or_update_initial( self, resource_group_name: str, workspace_name: str, compute_name: str, - parameters: "_models.ComputeResource", + parameters: Union[_models.ComputeResource, IO], **kwargs: Any - ) -> "_models.ComputeResource": - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] + ) -> _models.ComputeResource: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(parameters, 'ComputeResource') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComputeResource] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "ComputeResource") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ComputeResource', pipeline_response) + deserialized = self._deserialize("ComputeResource", pipeline_response) if response.status_code == 201: - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('ComputeResource', pipeline_response) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ComputeResource", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" + } + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ComputeResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComputeResource]: + """Creates or updates compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify + that it does not exist yet. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Payload with Machine Learning compute definition. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComputeResource]: + """Creates or updates compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify + that it does not exist yet. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Payload with Machine Learning compute definition. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -266,21 +405,26 @@ async def begin_create_or_update( resource_group_name: str, workspace_name: str, compute_name: str, - parameters: "_models.ComputeResource", + parameters: Union[_models.ComputeResource, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.ComputeResource"]: + ) -> AsyncLROPoller[_models.ComputeResource]: """Creates or updates compute. This call will overwrite a compute if it exists. This is a nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str - :param parameters: Payload with Machine Learning compute definition. - :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource + :param parameters: Payload with Machine Learning compute definition. 
Is either a + ComputeResource type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -293,17 +437,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComputeResource] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -312,85 +456,189 @@ async def begin_create_or_update( parameters=parameters, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ComputeResource', pipeline_response) + deserialized = self._deserialize("ComputeResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - 
begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" + } async def _update_initial( self, resource_group_name: str, workspace_name: str, compute_name: str, - parameters: "_models.ClusterUpdateParameters", + parameters: Union[_models.ClusterUpdateParameters, IO], **kwargs: Any - ) -> "_models.ComputeResource": - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] + ) -> _models.ComputeResource: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(parameters, 'ClusterUpdateParameters') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComputeResource] = kwargs.pop("cls", None) - request = build_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "ClusterUpdateParameters") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._update_initial.metadata['url'], + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ComputeResource', pipeline_response) + deserialized = self._deserialize("ComputeResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _update_initial.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" + } + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ClusterUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComputeResource]: + """Updates properties of a compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Additional parameters for cluster update. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComputeResource]: + """Updates properties of a compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Additional parameters for cluster update. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_update( @@ -398,20 +646,25 @@ async def begin_update( resource_group_name: str, workspace_name: str, compute_name: str, - parameters: "_models.ClusterUpdateParameters", + parameters: Union[_models.ClusterUpdateParameters, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.ComputeResource"]: + ) -> AsyncLROPoller[_models.ComputeResource]: """Updates properties of a compute. This call will overwrite a compute if it exists. This is a nonrecoverable operation. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str - :param parameters: Additional parameters for cluster update. - :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters + :param parameters: Additional parameters for cluster update. Is either a + ClusterUpdateParameters type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -424,17 +677,17 @@ async def begin_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComputeResource] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._update_initial( resource_group_name=resource_group_name, @@ -443,104 +696,121 @@ async def begin_update( parameters=parameters, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ComputeResource', pipeline_response) + deserialized = self._deserialize("ComputeResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, workspace_name: str, compute_name: str, - underlying_resource_action: Union[str, 
"_models.UnderlyingResourceAction"], + underlying_resource_action: Union[str, _models.UnderlyingResourceAction], **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, - api_version=api_version, + subscription_id=self._config.subscription_id, underlying_resource_action=underlying_resource_action, - template_url=self._delete_initial.metadata['url'], + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements + async def begin_delete( self, resource_group_name: str, workspace_name: str, compute_name: str, - underlying_resource_action: Union[str, "_models.UnderlyingResourceAction"], + underlying_resource_action: Union[str, _models.UnderlyingResourceAction], **kwargs: Any ) -> AsyncLROPoller[None]: 
"""Deletes specified Machine Learning compute. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the - underlying compute from workspace if 'Detach'. + underlying compute from workspace if 'Detach'. Known values are: "Delete" and "Detach". + Required. :type underlying_resource_action: str or ~azure.mgmt.machinelearningservices.models.UnderlyingResourceAction :keyword callable cls: A custom type or function that will be passed the direct response @@ -553,100 +823,188 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, underlying_resource_action=underlying_resource_action, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, 
raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" + } - @distributed_trace_async + @overload async def update_custom_services( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, workspace_name: str, compute_name: str, - custom_services: List["_models.CustomService"], + custom_services: List[_models.CustomService], + *, + content_type: str = "application/json", **kwargs: Any ) -> None: """Updates the custom services list. The list of custom services provided shall be overwritten. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str - :param custom_services: New list of Custom Services. + :param custom_services: New list of Custom Services. Required. :type custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Required. + :type custom_services: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
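For the begin_delete operation completed above, underlying_resource_action selects between removing the compute ("Delete") and detaching an attached resource from the workspace ("Detach"); both plain strings and the UnderlyingResourceAction enum are accepted. A small sketch, with `compute_ops` again assumed to be the async ComputeOperations instance:

async def detach_compute(compute_ops):
    poller = await compute_ops.begin_delete(
        resource_group_name="my-rg",            # placeholder values
        workspace_name="my-workspace",
        compute_name="attached-aks",
        underlying_resource_action="Detach",     # or the UnderlyingResourceAction enum equivalent
    )
    await poller.result()                        # resolves to None; raises HttpResponseError on failure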
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: Union[List[_models.CustomService], IO], + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Is either a [CustomService] type or a IO + type. Required. + :type custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) - _json = self._serialize.body(custom_services, '[CustomService]') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(custom_services, (IOBase, bytes)): + _content = custom_services + else: + _json = self._serialize.body(custom_services, "[CustomService]") request = build_update_custom_services_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update_custom_services.metadata['url'], + content=_content, + template_url=self.update_custom_services.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -657,62 +1015,72 @@ async def update_custom_services( # pylint: disable=inconsistent-return-stateme if cls: return cls(pipeline_response, None, {}) - update_custom_services.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices"} # type: ignore - + update_custom_services.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices" + } @distributed_trace def list_nodes( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs: Any - ) -> AsyncIterable["_models.AmlComputeNodesInformation"]: + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> AsyncIterable["_models.AmlComputeNodeInformation"]: """Get the details (e.g IP address, port etc) of all the compute nodes in the compute. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either AmlComputeNodesInformation or the result of + :return: An iterator like instance of either AmlComputeNodeInformation or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.AmlComputeNodesInformation] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformation] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.AmlComputeNodesInformation] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.AmlComputeNodesInformation"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_nodes_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_nodes.metadata['url'], + template_url=self.list_nodes.metadata["url"], + headers=_headers, + 
params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_nodes_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -723,16 +1091,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("AmlComputeNodesInformation", pipeline_response) list_of_elem = deserialized.nodes if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -743,58 +1110,62 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_nodes.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes"} # type: ignore + list_nodes.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes" + } @distributed_trace_async async def list_keys( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs: Any - ) -> "_models.ComputeSecrets": + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> _models.ComputeSecrets: """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. 
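The list_nodes pager above is consumed with `async for`; each service page deserializes to AmlComputeNodesInformation and the iterator yields the entries of its `nodes` list. A sketch under the assumption that the per-node model exposes `node_id` and `node_state` attributes:

async def print_node_states(compute_ops):
    # list_nodes is a synchronous method returning an AsyncItemPaged; do not await it.
    async for node in compute_ops.list_nodes(
        resource_group_name="my-rg",            # placeholder values
        workspace_name="my-workspace",
        compute_name="cpu-cluster",
    ):
        print(node.node_id, node.node_state)     # attribute names assumed from the model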
:type compute_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComputeSecrets, or the result of cls(response) + :return: ComputeSecrets or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComputeSecrets - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeSecrets"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComputeSecrets] = kwargs.pop("cls", None) - request = build_list_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_keys.metadata['url'], + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -802,75 +1173,78 @@ async def list_keys( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ComputeSecrets', pipeline_response) + deserialized = self._deserialize("ComputeSecrets", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys"} # type: ignore - + list_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys" + } async def _start_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = 
kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_start_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_start_request( resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._start_initial.metadata['url'], + template_url=self._start_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - _start_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start"} # type: ignore - + _start_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start" + } @distributed_trace_async - async def begin_start( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs: Any + async def begin_start( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Posts a start action to a compute instance. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -882,105 +1256,113 @@ async def begin_start( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
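The start, stop, and restart operations in this block share one shape: a 202-only *_initial request wrapped in an AsyncLROPoller that resolves to None. A hypothetical round-trip, assuming `compute_ops` is the async ComputeOperations instance:

async def cycle_compute_instance(compute_ops):
    args = dict(
        resource_group_name="my-rg",            # placeholder values
        workspace_name="my-workspace",
        compute_name="my-ci",
    )
    await (await compute_ops.begin_stop(**args)).result()      # poller resolves to None
    await (await compute_ops.begin_start(**args)).result()
    # begin_restart(**args) follows the identical pattern.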
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._start_initial( + raw_result = await self._start_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_start.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start"} # type: ignore + begin_start.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start" + } async def _stop_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - 
error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_stop_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_stop_request( resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._stop_initial.metadata['url'], + template_url=self._stop_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - _stop_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop"} # type: ignore - + _stop_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop" + } @distributed_trace_async - async def begin_stop( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs: Any + async def begin_stop( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Posts a stop action to a compute instance. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -992,105 +1374,113 @@ async def begin_stop( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._stop_initial( + raw_result = await self._stop_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_stop.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop"} # type: ignore + begin_stop.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop" + } async def _restart_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - 
error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_restart_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_restart_request( resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._restart_initial.metadata['url'], + template_url=self._restart_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - _restart_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart"} # type: ignore - + _restart_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart" + } @distributed_trace_async - async def begin_restart( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs: Any + async def begin_restart( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Posts a restart action to a compute instance. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -1102,99 +1492,189 @@ async def begin_restart( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._restart_initial( + raw_result = await self._restart_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_restart.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart"} # type: ignore + begin_restart.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart" + } - @distributed_trace_async + @overload async def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, workspace_name: str, compute_name: str, - parameters: "_models.IdleShutdownSetting", + parameters: _models.IdleShutdownSetting, + *, + content_type: str = "application/json", **kwargs: Any ) -> None: """Updates the idle shutdown setting of a compute instance. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Required. :type parameters: ~azure.mgmt.machinelearningservices.models.IdleShutdownSetting + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.IdleShutdownSetting, IO], + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Is either a IdleShutdownSetting type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.IdleShutdownSetting or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
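# A sketch of the two update_idle_shutdown_setting overloads added above: the body
# may be an IdleShutdownSetting model (serialized as JSON) or a file-like/bytes
# payload passed through unchanged. The `idle_time_before_shutdown` field, its
# "idleTimeBeforeShutdown" wire name, and the `client` object (an aio
# AzureMachineLearningServices, as in the earlier sketch) are assumptions not
# shown in this patch.
import io
import json

from azure.ai.ml._restclient.v2023_08_01_preview import models as _models


async def set_idle_shutdown(client) -> None:
    # Model overload: the operation serializes the model as application/json.
    setting = _models.IdleShutdownSetting(idle_time_before_shutdown="PT30M")  # assumed field name
    await client.compute.update_idle_shutdown_setting(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        compute_name="<compute-instance>",
        parameters=setting,
    )

    # IO overload: a pre-serialized JSON payload is sent as-is; content_type
    # falls back to "application/json" when not supplied.
    payload = io.BytesIO(json.dumps({"idleTimeBeforeShutdown": "PT30M"}).encode())  # assumed wire name
    await client.compute.update_idle_shutdown_setting(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        compute_name="<compute-instance>",
        parameters=payload,
    )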
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(parameters, 'IdleShutdownSetting') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "IdleShutdownSetting") request = build_update_idle_shutdown_setting_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update_idle_shutdown_setting.metadata['url'], + content=_content, + template_url=self.update_idle_shutdown_setting.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1205,55 +1685,60 @@ async def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-s if cls: return cls(pipeline_response, None, {}) - update_idle_shutdown_setting.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting"} # type: ignore - + update_idle_shutdown_setting.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting" + } @distributed_trace_async async def get_allowed_resize_sizes( - self, - resource_group_name: str, - workspace_name: str, - compute_name: str, - **kwargs: Any - ) -> "_models.VirtualMachineSizeListResult": + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> _models.VirtualMachineSizeListResult: """Returns supported virtual machine sizes for resize. :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: VirtualMachineSizeListResult, or the result of cls(response) + :return: VirtualMachineSizeListResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.VirtualMachineSizeListResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineSizeListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.VirtualMachineSizeListResult] = kwargs.pop("cls", None) - request = build_get_allowed_resize_sizes_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_allowed_resize_sizes.metadata['url'], + template_url=self.get_allowed_resize_sizes.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1261,88 +1746,112 @@ async def get_allowed_resize_sizes( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response) + deserialized = self._deserialize("VirtualMachineSizeListResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_allowed_resize_sizes.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/getAllowedVmSizesForResize"} # type: ignore - + get_allowed_resize_sizes.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/getAllowedVmSizesForResize" + } async def _resize_initial( # pylint: 
disable=inconsistent-return-statements self, resource_group_name: str, workspace_name: str, compute_name: str, - parameters: "_models.ResizeSchema", + parameters: Union[_models.ResizeSchema, IO], **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(parameters, 'ResizeSchema') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_resize_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "ResizeSchema") + + request = build_resize_request( resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._resize_initial.metadata['url'], + content=_content, + template_url=self._resize_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) if cls: return cls(pipeline_response, None, response_headers) - _resize_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize"} # type: ignore - + _resize_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize" + } - @distributed_trace_async - async def begin_resize( # pylint: disable=inconsistent-return-statements + @overload + 
async def begin_resize( self, resource_group_name: str, workspace_name: str, compute_name: str, - parameters: "_models.ResizeSchema", + parameters: _models.ResizeSchema, + *, + content_type: str = "application/json", **kwargs: Any ) -> AsyncLROPoller[None]: """Updates the size of a Compute Instance. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :param parameters: The object for updating VM size setting of specified Compute Instance. + Required. :type parameters: ~azure.mgmt.machinelearningservices.models.ResizeSchema + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -1353,45 +1862,127 @@ async def begin_resize( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + async def begin_resize( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the size of a Compute Instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating VM size setting of specified Compute Instance. + Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_resize( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ResizeSchema, IO], + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Updates the size of a Compute Instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating VM size setting of specified Compute Instance. Is + either a ResizeSchema type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ResizeSchema or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._resize_initial( + raw_result = await self._resize_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, parameters=parameters, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_resize.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize"} # type: ignore + begin_resize.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_data_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_data_containers_operations.py index e6f2a968207b..8256d2bbe47b 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_data_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_data_containers_operations.py @@ -6,44 +6,58 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
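# A sketch tying together get_allowed_resize_sizes and begin_resize from the compute
# operations above: query the VM sizes the instance may be resized to, then start the
# resize LRO. Assumptions not shown in this patch: the `compute` attribute name, the
# `value`/`name` attributes on VirtualMachineSizeListResult, and ResizeSchema's
# `target_vm_size` field; `client` is an aio AzureMachineLearningServices instance
# and resource names are placeholders.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models


async def resize_compute_instance(client) -> None:
    sizes = await client.compute.get_allowed_resize_sizes(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        compute_name="<compute-instance>",
    )
    # Candidate sizes are assumed to be exposed on `value`.
    print([size.name for size in sizes.value])

    poller = await client.compute.begin_resize(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        compute_name="<compute-instance>",
        parameters=_models.ResizeSchema(target_vm_size="Standard_DS3_v2"),  # assumed field name
    )
    await poller.result()  # AsyncLROPoller[None]; resolves when the resize completes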
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request -from ...operations._data_containers_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._data_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class DataContainersOperations: - """DataContainersOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class DataContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`data_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -51,60 +65,72 @@ def list( resource_group_name: str, workspace_name: str, skip: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, **kwargs: Any - ) -> AsyncIterable["_models.DataContainerResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.DataContainer"]: """List data containers. List data containers. :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataContainerResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either DataContainer or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.DataContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.DataContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = 
self._client.format_url(request.url) @@ -115,16 +141,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("DataContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -135,60 +160,64 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any ) -> None: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. 
:type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -199,57 +228,62 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any - ) -> "_models.DataContainer": + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.DataContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. 
:type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataContainer, or the result of cls(response) + :return: DataContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -257,15 +291,82 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('DataContainer', pipeline_response) + deserialized = self._deserialize("DataContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}" + } + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.DataContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataContainer: + """Create or update container. + + Create or update container. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. 
+ :type body: ~azure.mgmt.machinelearningservices.models.DataContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def create_or_update( @@ -273,55 +374,75 @@ async def create_or_update( resource_group_name: str, workspace_name: str, name: str, - body: "_models.DataContainer", + body: Union[_models.DataContainer, IO], **kwargs: Any - ) -> "_models.DataContainer": + ) -> _models.DataContainer: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.DataContainer + :param body: Container entity to create or update. Is either a DataContainer type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataContainer, or the result of cls(response) + :return: DataContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'DataContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataContainer] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DataContainer") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -330,15 +451,16 @@ async def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('DataContainer', pipeline_response) + deserialized = self._deserialize("DataContainer", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('DataContainer', pipeline_response) + deserialized = self._deserialize("DataContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_data_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_data_versions_operations.py index 3835c9addda0..a1ae76a498d0 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_data_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_data_versions_operations.py @@ -6,44 +6,58 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request -from ...operations._data_versions_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._data_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class DataVersionsOperations: - """DataVersionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class DataVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`data_versions` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -55,85 +69,93 @@ def list( top: Optional[int] = None, skip: Optional[str] = None, tags: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, stage: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.DataVersionBaseResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.DataVersionBase"]: """List data versions in the data container. List data versions in the data container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Data container's name. + :param name: Data container's name. Required. :type name: str - :param order_by: Please choose OrderBy value from ['createdtime', 'modifiedtime']. + :param order_by: Please choose OrderBy value from ['createdtime', 'modifiedtime']. Default + value is None. :type order_by: str :param top: Top count of results, top count cannot be greater than the page size. If topCount > page size, results with be default page size count - will be returned. + will be returned. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, - ListViewType.All]View type for including/excluding (for example) archived entities. + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param stage: data stage. + :param stage: data stage. Default value is None. 
:type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataVersionBaseResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either DataVersionBase or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.DataVersionBaseResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.DataVersionBase] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataVersionBaseResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataVersionBaseResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, tags=tags, list_view_type=list_view_type, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - name=name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - tags=tags, - list_view_type=list_view_type, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -144,16 +166,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("DataVersionBaseResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, 
- **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -164,64 +185,67 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any ) -> None: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -232,61 +256,65 @@ async def delete( # 
pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any - ) -> "_models.DataVersionBase": + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.DataVersionBase: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataVersionBase, or the result of cls(response) + :return: DataVersionBase or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataVersionBase"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataVersionBase] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -294,15 +322,88 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = 
self._deserialize('DataVersionBase', pipeline_response) + deserialized = self._deserialize("DataVersionBase", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}" + } + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.DataVersionBase, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataVersionBase: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataVersionBase: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def create_or_update( @@ -311,58 +412,78 @@ async def create_or_update( workspace_name: str, name: str, version: str, - body: "_models.DataVersionBase", + body: Union[_models.DataVersionBase, IO], **kwargs: Any - ) -> "_models.DataVersionBase": + ) -> _models.DataVersionBase: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :param body: Version entity to create or update. Is either a DataVersionBase type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataVersionBase, or the result of cls(response) + :return: DataVersionBase or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataVersionBase"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataVersionBase] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'DataVersionBase') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DataVersionBase") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -371,15 +492,16 @@ async def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if 
response.status_code == 200: - deserialized = self._deserialize('DataVersionBase', pipeline_response) + deserialized = self._deserialize("DataVersionBase", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('DataVersionBase', pipeline_response) + deserialized = self._deserialize("DataVersionBase", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_datastores_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_datastores_operations.py index 5013f852d334..1a9b45dcd14a 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_datastores_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_datastores_operations.py @@ -6,44 +6,59 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, List, Optional, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request -from ...operations._datastores_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request, build_list_secrets_request -T = TypeVar('T') +from ...operations._datastores_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, + build_list_secrets_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class DatastoresOperations: - """DatastoresOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class DatastoresOperations: + """ + .. 
warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`datastores` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -51,58 +66,64 @@ def list( resource_group_name: str, workspace_name: str, skip: Optional[str] = None, - count: Optional[int] = 30, + count: int = 30, is_default: Optional[bool] = None, names: Optional[List[str]] = None, search_text: Optional[str] = None, order_by: Optional[str] = None, - order_by_asc: Optional[bool] = False, + order_by_asc: bool = False, **kwargs: Any - ) -> AsyncIterable["_models.DatastoreResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.Datastore"]: """List datastores. List datastores. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param count: Maximum number of results to return. + :param count: Maximum number of results to return. Default value is 30. :type count: int - :param is_default: Filter down to the workspace default datastore. + :param is_default: Filter down to the workspace default datastore. Default value is None. :type is_default: bool - :param names: Names of datastores to return. + :param names: Names of datastores to return. Default value is None. :type names: list[str] - :param search_text: Text to search for in the datastore names. + :param search_text: Text to search for in the datastore names. Default value is None. :type search_text: str - :param order_by: Order by property (createdtime | modifiedtime | name). + :param order_by: Order by property (createdtime | modifiedtime | name). Default value is None. :type order_by: str - :param order_by_asc: Order by property in ascending order. + :param order_by_asc: Order by property in ascending order. Default value is False. 
:type order_by_asc: bool :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DatastoreResourceArmPaginatedResult or the result - of cls(response) + :return: An iterator like instance of either Datastore or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.DatastoreResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Datastore] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DatastoreResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.DatastoreResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, count=count, is_default=is_default, @@ -110,26 +131,26 @@ def prepare_request(next_link=None): search_text=search_text, order_by=order_by, order_by_asc=order_by_asc, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - count=count, - is_default=is_default, - names=names, - search_text=search_text, - order_by=order_by, - order_by_asc=order_by_asc, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -140,16 +161,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("DatastoreResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await 
self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -160,60 +180,64 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any ) -> None: """Delete datastore. Delete datastore. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Datastore name. + :param name: Datastore name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -224,57 +248,60 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return 
cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}" + } @distributed_trace_async - async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any - ) -> "_models.Datastore": + async def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.Datastore: """Get datastore. Get datastore. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Datastore name. + :param name: Datastore name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Datastore, or the result of cls(response) + :return: Datastore or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Datastore - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Datastore"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Datastore] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -282,15 +309,88 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Datastore', pipeline_response) + deserialized = self._deserialize("Datastore", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}" + } + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.Datastore, + skip_validation: bool = False, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Datastore: + """Create or update datastore. + + Create or update datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Datastore name. Required. + :type name: str + :param body: Datastore entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Datastore + :param skip_validation: Flag to skip validation. Default value is False. + :type skip_validation: bool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Datastore or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Datastore + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + skip_validation: bool = False, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Datastore: + """Create or update datastore. + + Create or update datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Datastore name. Required. + :type name: str + :param body: Datastore entity to create or update. Required. + :type body: IO + :param skip_validation: Flag to skip validation. Default value is False. + :type skip_validation: bool + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Datastore or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Datastore + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def create_or_update( @@ -298,59 +398,79 @@ async def create_or_update( resource_group_name: str, workspace_name: str, name: str, - body: "_models.Datastore", - skip_validation: Optional[bool] = False, + body: Union[_models.Datastore, IO], + skip_validation: bool = False, **kwargs: Any - ) -> "_models.Datastore": + ) -> _models.Datastore: """Create or update datastore. Create or update datastore. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. 
+ :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Datastore name. + :param name: Datastore name. Required. :type name: str - :param body: Datastore entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.Datastore - :param skip_validation: Flag to skip validation. + :param body: Datastore entity to create or update. Is either a Datastore type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.Datastore or IO + :param skip_validation: Flag to skip validation. Default value is False. :type skip_validation: bool + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Datastore, or the result of cls(response) + :return: Datastore or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Datastore - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Datastore"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Datastore] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'Datastore') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Datastore") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, + skip_validation=skip_validation, api_version=api_version, content_type=content_type, json=_json, - skip_validation=skip_validation, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -359,67 +479,72 @@ async def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = 
self._deserialize('Datastore', pipeline_response) + deserialized = self._deserialize("Datastore", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('Datastore', pipeline_response) + deserialized = self._deserialize("Datastore", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}" + } @distributed_trace_async async def list_secrets( - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any - ) -> "_models.DatastoreSecrets": + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.DatastoreSecrets: """Get datastore secrets. Get datastore secrets. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Datastore name. + :param name: Datastore name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatastoreSecrets, or the result of cls(response) + :return: DatastoreSecrets or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DatastoreSecrets - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DatastoreSecrets"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DatastoreSecrets] = kwargs.pop("cls", None) - request = build_list_secrets_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_secrets.metadata['url'], + template_url=self.list_secrets.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = 
pipeline_response.http_response if response.status_code not in [200]: @@ -427,12 +552,13 @@ async def list_secrets( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('DatastoreSecrets', pipeline_response) + deserialized = self._deserialize("DatastoreSecrets", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_secrets.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets"} # type: ignore - + list_secrets.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_environment_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_environment_containers_operations.py index 04798e327552..0fd5f3bb27a5 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_environment_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_environment_containers_operations.py @@ -6,44 +6,58 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request -from ...operations._environment_containers_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._environment_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class EnvironmentContainersOperations: - """EnvironmentContainersOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class EnvironmentContainersOperations: + """ + .. 
warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`environment_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -51,60 +65,73 @@ def list( resource_group_name: str, workspace_name: str, skip: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, **kwargs: Any - ) -> AsyncIterable["_models.EnvironmentContainerResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.EnvironmentContainer"]: """List environment containers. List environment containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either EnvironmentContainerResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either EnvironmentContainer or the result of + cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -115,16 +142,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("EnvironmentContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + 
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -135,60 +161,64 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any ) -> None: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -199,57 +229,62 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any - ) -> "_models.EnvironmentContainer": + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.EnvironmentContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EnvironmentContainer, or the result of cls(response) + :return: EnvironmentContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -257,15 +292,82 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return 
deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}" + } + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.EnvironmentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentContainer: + """Create or update container. + + Create or update container. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def create_or_update( @@ -273,55 +375,75 @@ async def create_or_update( resource_group_name: str, workspace_name: str, name: str, - body: "_models.EnvironmentContainer", + body: Union[_models.EnvironmentContainer, IO], **kwargs: Any - ) -> "_models.EnvironmentContainer": + ) -> _models.EnvironmentContainer: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. 
+ :param name: Container name. This is case-sensitive. Required. :type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :param body: Container entity to create or update. Is either a EnvironmentContainer type or a + IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EnvironmentContainer, or the result of cls(response) + :return: EnvironmentContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'EnvironmentContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EnvironmentContainer] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentContainer") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -330,15 +452,16 @@ async def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('EnvironmentContainer', 
pipeline_response) + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_environment_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_environment_versions_operations.py index a48745f8d383..7c38c71064b8 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_environment_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_environment_versions_operations.py @@ -6,44 +6,58 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request -from ...operations._environment_versions_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._environment_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class EnvironmentVersionsOperations: - """EnvironmentVersionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class EnvironmentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. 
- :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`environment_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -54,78 +68,86 @@ def list( order_by: Optional[str] = None, top: Optional[int] = None, skip: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, stage: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.EnvironmentVersionResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.EnvironmentVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :param stage: Stage for including/excluding (for example) archived entities. Takes priority - over listViewType. + over listViewType. Default value is None. 
:type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either EnvironmentVersionResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either EnvironmentVersion or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, list_view_type=list_view_type, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - name=name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - list_view_type=list_view_type, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -136,16 +158,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("EnvironmentVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - 
**kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -156,64 +177,67 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any ) -> None: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = 
pipeline_response.http_response if response.status_code not in [200, 204]: @@ -224,61 +248,65 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any - ) -> "_models.EnvironmentVersion": + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.EnvironmentVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EnvironmentVersion, or the result of cls(response) + :return: EnvironmentVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if 
response.status_code not in [200]: @@ -286,15 +314,88 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}" + } + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.EnvironmentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentVersion: + """Creates or updates an EnvironmentVersion. + + Creates or updates an EnvironmentVersion. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Name of EnvironmentVersion. This is case-sensitive. Required. + :type name: str + :param version: Version of EnvironmentVersion. Required. + :type version: str + :param body: Definition of EnvironmentVersion. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentVersion: + """Creates or updates an EnvironmentVersion. + + Creates or updates an EnvironmentVersion. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Name of EnvironmentVersion. This is case-sensitive. Required. + :type name: str + :param version: Version of EnvironmentVersion. Required. + :type version: str + :param body: Definition of EnvironmentVersion. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def create_or_update( @@ -303,58 +404,78 @@ async def create_or_update( workspace_name: str, name: str, version: str, - body: "_models.EnvironmentVersion", + body: Union[_models.EnvironmentVersion, IO], **kwargs: Any - ) -> "_models.EnvironmentVersion": + ) -> _models.EnvironmentVersion: """Creates or updates an EnvironmentVersion. Creates or updates an EnvironmentVersion. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Name of EnvironmentVersion. This is case-sensitive. + :param name: Name of EnvironmentVersion. This is case-sensitive. Required. :type name: str - :param version: Version of EnvironmentVersion. + :param version: Version of EnvironmentVersion. Required. :type version: str - :param body: Definition of EnvironmentVersion. - :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :param body: Definition of EnvironmentVersion. Is either a EnvironmentVersion type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EnvironmentVersion, or the result of cls(response) + :return: EnvironmentVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EnvironmentVersion] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'EnvironmentVersion') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentVersion") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, 
workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -363,15 +484,16 @@ async def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_features_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_features_operations.py index 02850fab89ac..476d130060dd 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_features_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_features_operations.py @@ -6,44 +6,52 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import urllib.parse from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... 
import models as _models from ..._vendor import _convert_request from ...operations._features_operations import build_get_request, build_list_request -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class FeaturesOperations: - """FeaturesOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class FeaturesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`features` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -56,87 +64,93 @@ def list( tags: Optional[str] = None, feature_name: Optional[str] = None, description: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, - page_size: Optional[int] = 1000, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 1000, **kwargs: Any - ) -> AsyncIterable["_models.FeatureResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.Feature"]: """List Features. List Features. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param featureset_name: Featureset name. This is case-sensitive. + :param featureset_name: Featureset name. This is case-sensitive. Required. :type featureset_name: str - :param featureset_version: Featureset Version identifier. This is case-sensitive. + :param featureset_version: Featureset Version identifier. This is case-sensitive. Required. :type featureset_version: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str - :param feature_name: feature name. + :param feature_name: feature name. Default value is None. :type feature_name: str - :param description: Description of the featureset. + :param description: Description of the featureset. Default value is None. 
:type description: str :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, - ListViewType.All]View type for including/excluding (for example) archived entities. + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param page_size: Page size. + :param page_size: Page size. Default value is 1000. :type page_size: int :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FeatureResourceArmPaginatedResult or the result of - cls(response) + :return: An iterator like instance of either Feature or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.FeatureResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Feature] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeatureResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeatureResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, featureset_name=featureset_name, featureset_version=featureset_version, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, tags=tags, feature_name=feature_name, description=description, list_view_type=list_view_type, page_size=page_size, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - featureset_name=featureset_name, - featureset_version=featureset_version, - api_version=api_version, - skip=skip, - tags=tags, - feature_name=feature_name, - description=description, - list_view_type=list_view_type, - page_size=page_size, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, 
_parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -147,16 +161,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("FeatureResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -167,11 +180,11 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features" + } @distributed_trace_async async def get( @@ -182,53 +195,61 @@ async def get( featureset_version: str, feature_name: str, **kwargs: Any - ) -> "_models.Feature": + ) -> _models.Feature: """Get feature. Get feature. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param featureset_name: Feature set name. This is case-sensitive. + :param featureset_name: Feature set name. This is case-sensitive. Required. :type featureset_name: str - :param featureset_version: Feature set version identifier. This is case-sensitive. + :param featureset_version: Feature set version identifier. This is case-sensitive. Required. :type featureset_version: str - :param feature_name: Feature Name. This is case-sensitive. + :param feature_name: Feature Name. This is case-sensitive. Required. 
:type feature_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Feature, or the result of cls(response) + :return: Feature or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Feature - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Feature"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Feature] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, featureset_name=featureset_name, featureset_version=featureset_version, feature_name=feature_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -236,12 +257,13 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Feature', pipeline_response) + deserialized = self._deserialize("Feature", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features/{featureName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features/{featureName}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featureset_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featureset_containers_operations.py index a407efebb072..77b53e9c8a9b 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featureset_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featureset_containers_operations.py @@ -6,46 +6,60 @@ # Code generated by Microsoft (R) 
AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._featureset_containers_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_entity_request, build_list_request -T = TypeVar('T') +from ...operations._featureset_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_entity_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class FeaturesetContainersOperations: - """FeaturesetContainersOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class FeaturesetContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`featureset_containers` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -54,59 +68,66 @@ def list( workspace_name: str, skip: Optional[str] = None, tags: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, - page_size: Optional[int] = 20, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, name: Optional[str] = None, description: Optional[str] = None, created_by: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.FeaturesetContainerResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.FeaturesetContainer"]: """List featurestore entity containers. List featurestore entity containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, - ListViewType.All]View type for including/excluding (for example) archived entities. + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param page_size: page size. + :param page_size: page size. Default value is 20. :type page_size: int - :param name: name for the featureset. + :param name: name for the featureset. Default value is None. :type name: str - :param description: description for the feature set. + :param description: description for the feature set. Default value is None. :type description: str - :param created_by: createdBy user name. + :param created_by: createdBy user name. Default value is None. 
:type created_by: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FeaturesetContainerResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either FeaturesetContainer or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.FeaturesetContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, tags=tags, list_view_type=list_view_type, @@ -114,26 +135,26 @@ def prepare_request(next_link=None): name=name, description=description, created_by=created_by, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - tags=tags, - list_view_type=list_view_type, - page_size=page_size, - name=name, - description=description, - created_by=created_by, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -144,16 +165,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("FeaturesetContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = 
prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -164,80 +184,83 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + 
response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -249,94 +272,106 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } @distributed_trace_async async def get_entity( - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any - ) -> "_models.FeaturesetContainer": + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.FeaturesetContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: FeaturesetContainer, or the result of cls(response) + :return: FeaturesetContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.FeaturesetContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetContainer] = kwargs.pop("cls", None) - request = build_get_entity_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_entity.metadata['url'], + template_url=self.get_entity.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -344,76 +379,182 @@ async def get_entity( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('FeaturesetContainer', pipeline_response) + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_entity.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}"} # type: ignore - + get_entity.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } async def _create_or_update_initial( self, resource_group_name: str, workspace_name: str, name: str, - body: "_models.FeaturesetContainer", + body: Union[_models.FeaturesetContainer, IO], **kwargs: Any - ) -> "_models.FeaturesetContainer": - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetContainer"] + ) -> _models.FeaturesetContainer: 
error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'FeaturesetContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetContainer] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturesetContainer") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('FeaturesetContainer', pipeline_response) + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('FeaturesetContainer', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: 
ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.FeaturesetContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturesetContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetContainer]: + """Create or update container. - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}"} # type: ignore + Create or update container. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturesetContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -421,21 +562,26 @@ async def begin_create_or_update( resource_group_name: str, workspace_name: str, name: str, - body: "_models.FeaturesetContainer", + body: Union[_models.FeaturesetContainer, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.FeaturesetContainer"]: + ) -> AsyncLROPoller[_models.FeaturesetContainer]: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetContainer + :param body: Container entity to create or update. Is either a FeaturesetContainer type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -448,17 +594,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetContainer"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetContainer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -467,29 +613,37 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('FeaturesetContainer', pipeline_response) + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } diff --git 
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featureset_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featureset_versions_operations.py index b882a3bb2303..d9cdf22e8ee5 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featureset_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featureset_versions_operations.py @@ -6,46 +6,61 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._featureset_versions_operations import build_backfill_request_initial, build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._featureset_versions_operations import ( + build_backfill_request, + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class FeaturesetVersionsOperations: - """FeaturesetVersionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class FeaturesetVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`featureset_versions` attribute. 
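As with the containers group above, this class is reached through the client rather than constructed directly, here via the client's featureset_versions attribute. A sketch of fetching one version through it (client as in the earlier examples; names are placeholders):

# Sketch: read a single featureset version.
async def show_version(client, resource_group: str, workspace: str, name: str, version: str) -> None:
    featureset_version = await client.featureset_versions.get(
        resource_group_name=resource_group,
        workspace_name=workspace,
        name=name,
        version=version,
    )
    print(featureset_version.id)
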
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -55,68 +70,75 @@ def list( name: str, skip: Optional[str] = None, tags: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, - page_size: Optional[int] = 20, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, version_name: Optional[str] = None, version: Optional[str] = None, description: Optional[str] = None, created_by: Optional[str] = None, stage: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.FeaturesetVersionResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.FeaturesetVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Featureset name. This is case-sensitive. + :param name: Featureset name. This is case-sensitive. Required. :type name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, - ListViewType.All]View type for including/excluding (for example) archived entities. + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param page_size: page size. + :param page_size: page size. Default value is 20. :type page_size: int - :param version_name: name for the featureset version. + :param version_name: name for the featureset version. Default value is None. :type version_name: str - :param version: featureset version. + :param version: featureset version. Default value is None. :type version: str - :param description: description for the feature set version. + :param description: description for the feature set version. Default value is None. :type description: str - :param created_by: createdBy user name. + :param created_by: createdBy user name. Default value is None. :type created_by: str - :param stage: Specifies the featurestore stage. + :param stage: Specifies the featurestore stage. Default value is None. 
:type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FeaturesetVersionResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either FeaturesetVersion or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.FeaturesetVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, tags=tags, list_view_type=list_view_type, @@ -126,29 +148,26 @@ def prepare_request(next_link=None): description=description, created_by=created_by, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - name=name, - api_version=api_version, - skip=skip, - tags=tags, - list_view_type=list_view_type, - page_size=page_size, - version_name=version_name, - version=version, - description=description, - created_by=created_by, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -159,16 +178,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("FeaturesetVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async 
def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -179,85 +197,86 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - 
response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -269,99 +288,110 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
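The continuation_token keyword documented above lets a caller persist an in-flight delete and resume it later; the regenerated body rebuilds the poller through AsyncLROPoller.from_continuation_token. A sketch of that round trip (client as before):

# Sketch: start deleting a featureset version, capture the poller state, resume later.
async def delete_version_resumable(
    client, resource_group: str, workspace: str, name: str, version: str
) -> None:
    poller = await client.featureset_versions.begin_delete(
        resource_group_name=resource_group,
        workspace_name=workspace,
        name=name,
        version=version,
    )
    token = poller.continuation_token()  # serializable snapshot of the LRO state

    # Later, possibly from another process: rebuild the poller and wait for completion.
    resumed = await client.featureset_versions.begin_delete(
        resource_group_name=resource_group,
        workspace_name=workspace,
        name=name,
        version=version,
        continuation_token=token,
    )
    await resumed.result()
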
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any - ) -> "_models.FeaturesetVersion": + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.FeaturesetVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: FeaturesetVersion, or the result of cls(response) + :return: FeaturesetVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.FeaturesetVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -369,15 +399,16 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('FeaturesetVersion', pipeline_response) + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } async def _create_or_update_initial( self, @@ -385,62 +416,173 @@ async def _create_or_update_initial( workspace_name: str, name: str, version: str, - body: "_models.FeaturesetVersion", + body: 
Union[_models.FeaturesetVersion, IO], **kwargs: Any - ) -> "_models.FeaturesetVersion": - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetVersion"] + ) -> _models.FeaturesetVersion: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'FeaturesetVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetVersion] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturesetVersion") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('FeaturesetVersion', pipeline_response) + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('FeaturesetVersion', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = 
self._deserialize("FeaturesetVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.FeaturesetVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturesetVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}"} # type: ignore + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetVersion]: + """Create or update version. + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturesetVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -449,23 +591,28 @@ async def begin_create_or_update( workspace_name: str, name: str, version: str, - body: "_models.FeaturesetVersion", + body: Union[_models.FeaturesetVersion, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.FeaturesetVersion"]: + ) -> AsyncLROPoller[_models.FeaturesetVersion]: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersion + :param body: Version entity to create or update. Is either a FeaturesetVersion type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -478,17 +625,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetVersion"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetVersion] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -498,32 +645,40 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('FeaturesetVersion', pipeline_response) + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } async 
def _backfill_initial( self, @@ -531,62 +686,168 @@ async def _backfill_initial( workspace_name: str, name: str, version: str, - body: "_models.FeaturesetVersionBackfillRequest", + body: Union[_models.FeaturesetVersionBackfillRequest, IO], **kwargs: Any - ) -> Optional["_models.FeaturesetVersionBackfillResponse"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.FeaturesetVersionBackfillResponse"]] + ) -> Optional[_models.FeaturesetVersionBackfillResponse]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'FeaturesetVersionBackfillRequest') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.FeaturesetVersionBackfillResponse]] = kwargs.pop("cls", None) - request = build_backfill_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturesetVersionBackfillRequest") + + request = build_backfill_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._backfill_initial.metadata['url'], + content=_content, + template_url=self._backfill_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('FeaturesetVersionBackfillResponse', pipeline_response) + deserialized = self._deserialize("FeaturesetVersionBackfillResponse", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", 
response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _backfill_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill"} # type: ignore + _backfill_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill" + } + @overload + async def begin_backfill( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.FeaturesetVersionBackfillRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetVersionBackfillResponse]: + """Backfill. + + Backfill. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Feature set version backfill request entity. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturesetVersionBackfillResponse or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_backfill( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturesetVersionBackfillResponse]: + """Backfill. + + Backfill. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Feature set version backfill request entity. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturesetVersionBackfillResponse or + the result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_backfill( @@ -595,23 +856,28 @@ async def begin_backfill( workspace_name: str, name: str, version: str, - body: "_models.FeaturesetVersionBackfillRequest", + body: Union[_models.FeaturesetVersionBackfillRequest, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.FeaturesetVersionBackfillResponse"]: + ) -> AsyncLROPoller[_models.FeaturesetVersionBackfillResponse]: """Backfill. Backfill. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Feature set version backfill request entity. - :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillRequest + :param body: Feature set version backfill request entity. Is either a + FeaturesetVersionBackfillRequest type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -624,17 +890,17 @@ async def begin_backfill( the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetVersionBackfillResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetVersionBackfillResponse] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._backfill_initial( resource_group_name=resource_group_name, @@ -644,29 +910,36 @@ async def begin_backfill( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('FeaturesetVersionBackfillResponse', pipeline_response) + deserialized = self._deserialize("FeaturesetVersionBackfillResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_backfill.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill"} # type: ignore + begin_backfill.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featurestore_entity_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featurestore_entity_containers_operations.py index c1dd1a35002f..cc9211d312c8 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featurestore_entity_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featurestore_entity_containers_operations.py @@ -6,46 +6,60 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._featurestore_entity_containers_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_entity_request, build_list_request -T = TypeVar('T') +from ...operations._featurestore_entity_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_entity_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class FeaturestoreEntityContainersOperations: - """FeaturestoreEntityContainersOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class FeaturestoreEntityContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
+ Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`featurestore_entity_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -54,59 +68,67 @@ def list( workspace_name: str, skip: Optional[str] = None, tags: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, - page_size: Optional[int] = 20, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, name: Optional[str] = None, description: Optional[str] = None, created_by: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.FeaturestoreEntityContainerResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.FeaturestoreEntityContainer"]: """List featurestore entity containers. List featurestore entity containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, - ListViewType.All]View type for including/excluding (for example) archived entities. + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param page_size: page size. + :param page_size: page size. Default value is 20. :type page_size: int - :param name: name for the featurestore entity. + :param name: name for the featurestore entity. Default value is None. :type name: str - :param description: description for the featurestore entity. + :param description: description for the featurestore entity. Default value is None. :type description: str - :param created_by: createdBy user name. + :param created_by: createdBy user name. Default value is None. 
:type created_by: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either - FeaturestoreEntityContainerResourceArmPaginatedResult or the result of cls(response) + :return: An iterator like instance of either FeaturestoreEntityContainer or the result of + cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, tags=tags, list_view_type=list_view_type, @@ -114,26 +136,26 @@ def prepare_request(next_link=None): name=name, description=description, created_by=created_by, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - tags=tags, - list_view_type=list_view_type, - page_size=page_size, - name=name, - description=description, - created_by=created_by, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -144,16 +166,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("FeaturestoreEntityContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async 
def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -164,80 +185,83 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', 
response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -249,94 +273,106 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } @distributed_trace_async async def get_entity( - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any - ) -> "_models.FeaturestoreEntityContainer": + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.FeaturestoreEntityContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: FeaturestoreEntityContainer, or the result of cls(response) + :return: FeaturestoreEntityContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityContainer] = kwargs.pop("cls", None) - request = build_get_entity_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_entity.metadata['url'], + template_url=self.get_entity.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -344,76 +380,182 @@ async def get_entity( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('FeaturestoreEntityContainer', pipeline_response) + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_entity.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"} # type: ignore - + get_entity.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } async def _create_or_update_initial( self, resource_group_name: str, workspace_name: str, name: str, - body: "_models.FeaturestoreEntityContainer", + body: Union[_models.FeaturestoreEntityContainer, IO], **kwargs: Any - ) -> "_models.FeaturestoreEntityContainer": - cls = 
kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityContainer"] + ) -> _models.FeaturestoreEntityContainer: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'FeaturestoreEntityContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityContainer] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturestoreEntityContainer") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('FeaturestoreEntityContainer', pipeline_response) + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('FeaturestoreEntityContainer', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = 
self._deserialize("FeaturestoreEntityContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.FeaturestoreEntityContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturestoreEntityContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturestoreEntityContainer or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturestoreEntityContainer]: + """Create or update container. - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"} # type: ignore + Create or update container. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturestoreEntityContainer or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -421,21 +563,26 @@ async def begin_create_or_update( resource_group_name: str, workspace_name: str, name: str, - body: "_models.FeaturestoreEntityContainer", + body: Union[_models.FeaturestoreEntityContainer, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.FeaturestoreEntityContainer"]: + ) -> AsyncLROPoller[_models.FeaturestoreEntityContainer]: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer + :param body: Container entity to create or update. Is either a FeaturestoreEntityContainer type + or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -448,17 +595,17 @@ async def begin_create_or_update( result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityContainer"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityContainer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -467,29 +614,37 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('FeaturestoreEntityContainer', pipeline_response) + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"} # type: ignore + begin_create_or_update.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featurestore_entity_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featurestore_entity_versions_operations.py index ad61a61c06bf..6b530d842d13 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featurestore_entity_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_featurestore_entity_versions_operations.py @@ -6,46 +6,60 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._featurestore_entity_versions_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._featurestore_entity_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class FeaturestoreEntityVersionsOperations: - """FeaturestoreEntityVersionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class FeaturestoreEntityVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
+ Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`featurestore_entity_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -55,68 +69,76 @@ def list( name: str, skip: Optional[str] = None, tags: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, - page_size: Optional[int] = 20, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, version_name: Optional[str] = None, version: Optional[str] = None, description: Optional[str] = None, created_by: Optional[str] = None, stage: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.FeaturestoreEntityVersionResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.FeaturestoreEntityVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Feature entity name. This is case-sensitive. + :param name: Feature entity name. This is case-sensitive. Required. :type name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, - ListViewType.All]View type for including/excluding (for example) archived entities. + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param page_size: page size. + :param page_size: page size. Default value is 20. :type page_size: int - :param version_name: name for the featurestore entity version. + :param version_name: name for the featurestore entity version. Default value is None. :type version_name: str - :param version: featurestore entity version. + :param version: featurestore entity version. Default value is None. :type version: str - :param description: description for the feature entity version. + :param description: description for the feature entity version. Default value is None. :type description: str - :param created_by: createdBy user name. + :param created_by: createdBy user name. Default value is None. :type created_by: str - :param stage: Specifies the featurestore stage. + :param stage: Specifies the featurestore stage. Default value is None. 
:type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either - FeaturestoreEntityVersionResourceArmPaginatedResult or the result of cls(response) + :return: An iterator like instance of either FeaturestoreEntityVersion or the result of + cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, tags=tags, list_view_type=list_view_type, @@ -126,29 +148,26 @@ def prepare_request(next_link=None): description=description, created_by=created_by, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - name=name, - api_version=api_version, - skip=skip, - tags=tags, - list_view_type=list_view_type, - page_size=page_size, - version_name=version_name, - version=version, - description=description, - created_by=created_by, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -159,16 +178,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("FeaturestoreEntityVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return 
deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -179,85 +197,86 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - 
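# --- Editor's note (not part of the patch): a sketch of consuming the AsyncItemPaged returned by
# the list() operation above. list() itself is synchronous; only the iteration is awaited. `client`
# is the async AzureMachineLearningServices client from the earlier sketch, and the resource names
# are hypothetical.
async def print_entity_versions(client) -> None:
    pages = client.featurestore_entity_versions.list(
        resource_group_name="my-rg",
        workspace_name="my-featurestore",
        name="customer",
        list_view_type="ActiveOnly",  # known values: "ActiveOnly", "ArchivedOnly", "All"
        page_size=20,                 # default page size per the docstring
    )
    async for version in pages:
        # The paginator follows next_link automatically, re-applying the client's api-version
        # to the continuation request as shown in prepare_request above.
        print(version.name)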
response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -269,99 +288,110 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any - ) -> "_models.FeaturestoreEntityVersion": + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.FeaturestoreEntityVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: FeaturestoreEntityVersion, or the result of cls(response) + :return: FeaturestoreEntityVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -369,15 +399,16 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('FeaturestoreEntityVersion', pipeline_response) + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } async def _create_or_update_initial( self, @@ -385,62 +416,173 @@ async def 
_create_or_update_initial( workspace_name: str, name: str, version: str, - body: "_models.FeaturestoreEntityVersion", + body: Union[_models.FeaturestoreEntityVersion, IO], **kwargs: Any - ) -> "_models.FeaturestoreEntityVersion": - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityVersion"] + ) -> _models.FeaturestoreEntityVersion: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'FeaturestoreEntityVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityVersion] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturestoreEntityVersion") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('FeaturestoreEntityVersion', pipeline_response) + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('FeaturestoreEntityVersion', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + 
"duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}"} # type: ignore + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.FeaturestoreEntityVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturestoreEntityVersion]: + """Create or update version. + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturestoreEntityVersion or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.FeaturestoreEntityVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
+ :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either FeaturestoreEntityVersion or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -449,23 +591,28 @@ async def begin_create_or_update( workspace_name: str, name: str, version: str, - body: "_models.FeaturestoreEntityVersion", + body: Union[_models.FeaturestoreEntityVersion, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.FeaturestoreEntityVersion"]: + ) -> AsyncLROPoller[_models.FeaturestoreEntityVersion]: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion + :param body: Version entity to create or update. Is either a FeaturestoreEntityVersion type or + a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -478,17 +625,17 @@ async def begin_create_or_update( result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityVersion"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityVersion] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -498,29 +645,37 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('FeaturestoreEntityVersion', pipeline_response) + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}"} # type: ignore + begin_create_or_update.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_jobs_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_jobs_operations.py index 9480e9d2172f..cc83a23dadaa 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_jobs_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_jobs_operations.py @@ -6,46 +6,62 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._jobs_operations import build_cancel_request_initial, build_create_or_update_request, build_delete_request_initial, build_get_request, build_list_request, build_update_request -T = TypeVar('T') +from ...operations._jobs_operations import ( + build_cancel_request, + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, + build_update_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class JobsOperations: - """JobsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class JobsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`jobs` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -55,60 +71,67 @@ def list( skip: Optional[str] = None, job_type: Optional[str] = None, tag: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, asset_name: Optional[str] = None, scheduled: Optional[bool] = None, schedule_id: Optional[str] = None, properties: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.JobBaseResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.JobBase"]: """Lists Jobs in the workspace. Lists Jobs in the workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param job_type: Type of job to be returned. + :param job_type: Type of job to be returned. Default value is None. :type job_type: str - :param tag: Jobs returned will have this tag key. + :param tag: Jobs returned will have this tag key. Default value is None. :type tag: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param asset_name: Asset name the job's named output is registered with. + :param asset_name: Asset name the job's named output is registered with. Default value is None. :type asset_name: str - :param scheduled: Indicator whether the job is scheduled job. + :param scheduled: Indicator whether the job is scheduled job. Default value is None. :type scheduled: bool - :param schedule_id: The scheduled id for listing the job triggered from. + :param schedule_id: The scheduled id for listing the job triggered from. Default value is None. :type schedule_id: str :param properties: Comma-separated list of property names (and optionally values). Example: - prop1,prop2=value2. + prop1,prop2=value2. Default value is None. 
:type properties: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either JobBaseResourceArmPaginatedResult or the result of - cls(response) + :return: An iterator like instance of either JobBase or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.JobBaseResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.JobBase] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.JobBaseResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobBaseResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, job_type=job_type, tag=tag, @@ -117,27 +140,26 @@ def prepare_request(next_link=None): scheduled=scheduled, schedule_id=schedule_id, properties=properties, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - job_type=job_type, - tag=tag, - list_view_type=list_view_type, - asset_name=asset_name, - scheduled=scheduled, - schedule_id=schedule_id, - properties=properties, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -148,16 +170,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("JobBaseResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await 
self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -168,80 +189,83 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - id: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", 
response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - id: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes a Job (asynchronous). Deletes a Job (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the Job. This is case-sensitive. + :param id: The name and identifier for the Job. This is case-sensitive. Required. :type id: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -253,94 +277,104 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}" + } @distributed_trace_async - async def get( - self, - resource_group_name: str, - workspace_name: str, - id: str, - **kwargs: Any - ) -> "_models.JobBase": + async def get(self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any) -> _models.JobBase: """Gets a Job by name/id. Gets a Job by name/id. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. 
+ :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the Job. This is case-sensitive. + :param id: The name and identifier for the Job. This is case-sensitive. Required. :type id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: JobBase, or the result of cls(response) + :return: JobBase or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.JobBase - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobBase"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.JobBase] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -348,15 +382,82 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('JobBase', pipeline_response) + deserialized = self._deserialize("JobBase", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}" + } + + @overload + async def update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.PartialJobBasePartialResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobBase: + """Updates a Job. + + Updates a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. 
This is case-sensitive. Required. + :type id: str + :param body: Job definition to apply during the operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialJobBasePartialResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobBase: + """Updates a Job. + + Updates a Job. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition to apply during the operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def update( @@ -364,55 +465,75 @@ async def update( resource_group_name: str, workspace_name: str, id: str, - body: "_models.PartialJobBasePartialResource", + body: Union[_models.PartialJobBasePartialResource, IO], **kwargs: Any - ) -> "_models.JobBase": + ) -> _models.JobBase: """Updates a Job. Updates a Job. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the Job. This is case-sensitive. + :param id: The name and identifier for the Job. This is case-sensitive. Required. :type id: str - :param body: Job definition to apply during the operation. - :type body: ~azure.mgmt.machinelearningservices.models.PartialJobBasePartialResource + :param body: Job definition to apply during the operation. Is either a + PartialJobBasePartialResource type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialJobBasePartialResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: JobBase, or the result of cls(response) + :return: JobBase or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.JobBase - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobBase"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.JobBase] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'PartialJobBasePartialResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialJobBasePartialResource") request = build_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update.metadata['url'], + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -420,71 +541,152 @@ async def update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('JobBase', pipeline_response) + deserialized = self._deserialize("JobBase", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}" + } - - @distributed_trace_async + @overload async def create_or_update( self, resource_group_name: str, workspace_name: str, id: str, - body: "_models.JobBase", + body: _models.JobBase, + *, + content_type: str = "application/json", **kwargs: Any 
- ) -> "_models.JobBase": + ) -> _models.JobBase: """Creates and executes a Job. Creates and executes a Job. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the Job. This is case-sensitive. + :param id: The name and identifier for the Job. This is case-sensitive. Required. :type id: str - :param body: Job definition object. + :param body: Job definition object. Required. :type body: ~azure.mgmt.machinelearningservices.models.JobBase + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: JobBase, or the result of cls(response) + :return: JobBase or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.JobBase - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobBase: + """Creates and executes a Job. + + Creates and executes a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_update( + self, resource_group_name: str, workspace_name: str, id: str, body: Union[_models.JobBase, IO], **kwargs: Any + ) -> _models.JobBase: + """Creates and executes a Job. + + Creates and executes a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition object. Is either a JobBase type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.JobBase or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobBase"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.JobBase] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'JobBase') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "JobBase") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -493,86 +695,88 @@ async def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('JobBase', pipeline_response) + deserialized = self._deserialize("JobBase", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('JobBase', pipeline_response) + deserialized = self._deserialize("JobBase", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}" + } async def _cancel_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: 
str, - workspace_name: str, - id: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_cancel_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_cancel_request( resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._cancel_initial.metadata['url'], + template_url=self._cancel_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _cancel_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel"} # type: ignore - + _cancel_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel" + } @distributed_trace_async - async def begin_cancel( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - id: str, - **kwargs: Any + async def begin_cancel( + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Cancels a Job (asynchronous). Cancels a Job (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
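# Illustrative usage sketch, not part of the generated client or of this patch: how
# the regenerated async JobsOperations above might be called. The client class name
# and import path are inferred from this patch's file layout, and the constructor
# arguments, credential type, and resource names below are placeholder assumptions.
from azure.identity.aio import DefaultAzureCredential
from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices


async def get_and_cancel_job() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            # get() returns the deserialized JobBase (or cls(response) when a custom
            # callback is passed through the `cls` keyword).
            job = await client.jobs.get("<resource-group>", "<workspace>", "<job-id>")
            print(job.name)

            # begin_cancel() starts the long-running cancel and returns an
            # AsyncLROPoller[None]; result() drives polling to completion using the
            # Location-header strategy configured in the lro_options above.
            poller = await client.jobs.begin_cancel("<resource-group>", "<workspace>", "<job-id>")
            await poller.result()
# Example entry point (assumed): asyncio.run(get_and_cancel_job())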
:type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the Job. This is case-sensitive. + :param id: The name and identifier for the Job. This is case-sensitive. Required. :type id: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -584,42 +788,50 @@ async def begin_cancel( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._cancel_initial( + raw_result = await self._cancel_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_cancel.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel"} # type: ignore + 
begin_cancel.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_labeling_jobs_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_labeling_jobs_operations.py index 3f8f9d4488ab..6979954682f8 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_labeling_jobs_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_labeling_jobs_operations.py @@ -6,46 +6,63 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._labeling_jobs_operations import build_create_or_update_request_initial, build_delete_request, build_export_labels_request_initial, build_get_request, build_list_request, build_pause_request, build_resume_request_initial -T = TypeVar('T') +from ...operations._labeling_jobs_operations import ( + build_create_or_update_request, + build_delete_request, + build_export_labels_request, + build_get_request, + build_list_request, + build_pause_request, + build_resume_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class LabelingJobsOperations: - """LabelingJobsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class LabelingJobsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
+ Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`labeling_jobs` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -55,58 +72,69 @@ def list( skip: Optional[str] = None, top: Optional[int] = None, **kwargs: Any - ) -> AsyncIterable["_models.LabelingJobResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.LabelingJob"]: """Lists labeling jobs in the workspace. Lists labeling jobs in the workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param top: Number of labeling jobs to return. + :param top: Number of labeling jobs to return. Default value is None. :type top: int :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either LabelingJobResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either LabelingJob or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.LabelingJobResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJobResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.LabelingJobResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, top=top, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, 
+ params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - top=top, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -117,16 +145,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("LabelingJobResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -137,60 +164,64 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - id: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any ) -> None: """Delete a labeling job. Delete a labeling job. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the LabelingJob. + :param id: The name and identifier for the LabelingJob. Required. 
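# Illustrative sketch (assumes `client` is the aio AzureMachineLearningServices client
# from the earlier example). The regenerated list() pager above returns an
# AsyncItemPaged of LabelingJob and rebuilds each nextLink request with the client's
# api-version, so callers simply iterate with `async for`.
async def print_labeling_jobs(client, resource_group: str, workspace: str) -> None:
    async for labeling_job in client.labeling_jobs.list(resource_group, workspace, top=10):
        print(labeling_job.name)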
:type id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -201,57 +232,60 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } @distributed_trace_async - async def get( - self, - resource_group_name: str, - workspace_name: str, - id: str, - **kwargs: Any - ) -> "_models.LabelingJob": + async def get(self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any) -> _models.LabelingJob: """Gets a labeling job by name/id. Gets a labeling job by name/id. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the LabelingJob. + :param id: The name and identifier for the LabelingJob. Required. 
:type id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: LabelingJob, or the result of cls(response) + :return: LabelingJob or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.LabelingJob - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.LabelingJob"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJob] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -259,76 +293,182 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('LabelingJob', pipeline_response) + deserialized = self._deserialize("LabelingJob", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } async def _create_or_update_initial( self, resource_group_name: str, workspace_name: str, id: str, - body: "_models.LabelingJob", + body: Union[_models.LabelingJob, IO], **kwargs: Any - ) -> "_models.LabelingJob": - cls = kwargs.pop('cls', None) # type: ClsType["_models.LabelingJob"] + ) -> _models.LabelingJob: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # 
type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'LabelingJob') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.LabelingJob] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "LabelingJob") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('LabelingJob', pipeline_response) + deserialized = self._deserialize("LabelingJob", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('LabelingJob', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("LabelingJob", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } + @overload + async def 
begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.LabelingJob, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
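# Illustrative sketch of the begin_create_or_update overloads documented above
# (placeholder payload, not a complete LabelingJob schema; `client` is assumed to be
# the aio AzureMachineLearningServices client). A LabelingJob model is serialized by
# the client, while bytes or a file-like object is passed straight through as the
# request content, so an already-serialized JSON body can be sent as-is.
import json


async def create_labeling_job_from_json(client, resource_group: str, workspace: str) -> None:
    raw_body = json.dumps({"properties": {}}).encode("utf-8")  # placeholder body
    poller = await client.labeling_jobs.begin_create_or_update(
        resource_group,
        workspace,
        "my-labeling-job",
        raw_body,
        content_type="application/json",
    )
    labeling_job = await poller.result()
    print(labeling_job.id)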
+ :return: An instance of AsyncLROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -336,21 +476,26 @@ async def begin_create_or_update( resource_group_name: str, workspace_name: str, id: str, - body: "_models.LabelingJob", + body: Union[_models.LabelingJob, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.LabelingJob"]: + ) -> AsyncLROPoller[_models.LabelingJob]: """Creates or updates a labeling job (asynchronous). Creates or updates a labeling job (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the LabelingJob. + :param id: The name and identifier for the LabelingJob. Required. :type id: str - :param body: LabelingJob definition object. - :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob + :param body: LabelingJob definition object. Is either a LabelingJob type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -363,17 +508,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.LabelingJob"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.LabelingJob] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -382,93 +527,205 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('LabelingJob', pipeline_response) + deserialized = self._deserialize("LabelingJob", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } async def _export_labels_initial( self, resource_group_name: str, 
workspace_name: str, id: str, - body: "_models.ExportSummary", + body: Union[_models.ExportSummary, IO], **kwargs: Any - ) -> Optional["_models.ExportSummary"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExportSummary"]] + ) -> Optional[_models.ExportSummary]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ExportSummary') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ExportSummary]] = kwargs.pop("cls", None) - request = build_export_labels_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ExportSummary") + + request = build_export_labels_request( resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._export_labels_initial.metadata['url'], + content=_content, + template_url=self._export_labels_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ExportSummary', pipeline_response) + deserialized = self._deserialize("ExportSummary", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _export_labels_initial.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels"} # type: ignore + _export_labels_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels" + } + + @overload + async def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.ExportSummary, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_export_labels( @@ -476,9 +733,9 @@ async def begin_export_labels( resource_group_name: str, workspace_name: str, id: str, - body: "_models.ExportSummary", + body: Union[_models.ExportSummary, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.ExportSummary"]: + ) -> AsyncLROPoller[_models.ExportSummary]: """Export labels from a labeling job (asynchronous). Using the URL in the Location header, the status of the job export operation can be tracked. @@ -486,13 +743,17 @@ async def begin_export_labels( status of the job export operation can be tracked. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the LabelingJob. + :param id: The name and identifier for the LabelingJob. Required. :type id: str - :param body: The export summary. - :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary + :param body: The export summary. Is either a ExportSummary type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -505,17 +766,17 @@ async def begin_export_labels( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ExportSummary"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ExportSummary] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._export_labels_initial( resource_group_name=resource_group_name, @@ -524,81 +785,92 @@ async def begin_export_labels( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ExportSummary', pipeline_response) + deserialized = self._deserialize("ExportSummary", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_export_labels.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels"} # type: ignore + begin_export_labels.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels" + } @distributed_trace_async async def pause( - self, - 
resource_group_name: str, - workspace_name: str, - id: str, - **kwargs: Any - ) -> "_models.LabelingJobProperties": + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> _models.LabelingJobProperties: """Pause a labeling job. Pause a labeling job. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the LabelingJob. + :param id: The name and identifier for the LabelingJob. Required. :type id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: LabelingJobProperties, or the result of cls(response) + :return: LabelingJobProperties or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.LabelingJobProperties"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJobProperties] = kwargs.pop("cls", None) - request = build_pause_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.pause.metadata['url'], + template_url=self.pause.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -606,89 +878,91 @@ async def pause( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('LabelingJobProperties', pipeline_response) + deserialized = self._deserialize("LabelingJobProperties", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - pause.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause"} # type: ignore - + pause.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause" + } async def _resume_initial( - self, - resource_group_name: str, - workspace_name: str, - id: str, - **kwargs: Any - ) -> Optional["_models.LabelingJobProperties"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.LabelingJobProperties"]] + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> Optional[_models.LabelingJobProperties]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_resume_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Optional[_models.LabelingJobProperties]] = kwargs.pop("cls", None) + + request = build_resume_request( resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._resume_initial.metadata['url'], + template_url=self._resume_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('LabelingJobProperties', pipeline_response) + deserialized = self._deserialize("LabelingJobProperties", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _resume_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume"} # type: ignore - + _resume_initial.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume" + } @distributed_trace_async async def begin_resume( - self, - resource_group_name: str, - workspace_name: str, - id: str, - **kwargs: Any - ) -> AsyncLROPoller["_models.LabelingJobProperties"]: + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> AsyncLROPoller[_models.LabelingJobProperties]: """Resume a labeling job (asynchronous). Resume a labeling job (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the LabelingJob. + :param id: The name and identifier for the LabelingJob. Required. :type id: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -702,45 +976,52 @@ async def begin_resume( of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.LabelingJobProperties] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.LabelingJobProperties"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJobProperties] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._resume_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('LabelingJobProperties', pipeline_response) + deserialized = self._deserialize("LabelingJobProperties", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if 
cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_resume.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume"} # type: ignore + begin_resume.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_managed_network_provisions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_managed_network_provisions_operations.py index db954345d6d2..2dd6f57af708 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_managed_network_provisions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_managed_network_provisions_operations.py @@ -6,125 +6,234 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, Callable, Dict, Optional, TypeVar, Union +from io import IOBase +from typing import IO, Any, Callable, Dict, Optional, TypeVar, Union, cast, overload -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._managed_network_provisions_operations import build_provision_managed_network_request_initial -T = TypeVar('T') +from ...operations._managed_network_provisions_operations import build_provision_managed_network_request + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ManagedNetworkProvisionsOperations: - """ManagedNetworkProvisionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class ManagedNetworkProvisionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. 
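
For reference, a minimal sketch of how the regenerated async labeling-job operations above (pause, begin_resume, and the begin_export_labels LRO) might be called. The import path, the AzureMachineLearningServices export, the labeling_jobs attribute name, and every resource name below are assumptions for illustration, not taken from this patch:

    import asyncio

    from azure.identity.aio import DefaultAzureCredential
    from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices


    async def main() -> None:
        # Assumed construction of the regenerated async client; the subscription id is a placeholder.
        async with DefaultAzureCredential() as credential:
            async with AzureMachineLearningServices(
                credential=credential, subscription_id="<subscription-id>"
            ) as client:
                # Pause a labeling job, then resume it via the long-running-operation poller.
                props = await client.labeling_jobs.pause(
                    resource_group_name="<resource-group>",
                    workspace_name="<workspace>",
                    id="<labeling-job-id>",
                )
                print("properties after pause:", props)

                resume_poller = await client.labeling_jobs.begin_resume(
                    resource_group_name="<resource-group>",
                    workspace_name="<workspace>",
                    id="<labeling-job-id>",
                )
                resumed = await resume_poller.result()
                print("resumed:", resumed)
                # begin_export_labels additionally takes a `body` that is either an
                # ExportSummary model (a concrete export-format subtype) or a
                # pre-serialized JSON stream, and returns an AsyncLROPoller[ExportSummary].


    asyncio.run(main())
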
- :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`managed_network_provisions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") async def _provision_managed_network_initial( self, resource_group_name: str, workspace_name: str, - body: Optional["_models.ManagedNetworkProvisionOptions"] = None, + body: Optional[Union[_models.ManagedNetworkProvisionOptions, IO]] = None, **kwargs: Any - ) -> Optional["_models.ManagedNetworkProvisionStatus"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ManagedNetworkProvisionStatus"]] + ) -> Optional[_models.ManagedNetworkProvisionStatus]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ManagedNetworkProvisionStatus]] = kwargs.pop("cls", None) - if body is not None: - _json = self._serialize.body(body, 'ManagedNetworkProvisionOptions') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = None + if body is not None: + _json = self._serialize.body(body, "ManagedNetworkProvisionOptions") + else: + _json = None - request = build_provision_managed_network_request_initial( - subscription_id=self._config.subscription_id, + request = build_provision_managed_network_request( resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._provision_managed_network_initial.metadata['url'], + content=_content, + template_url=self._provision_managed_network_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - 
stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ManagedNetworkProvisionStatus', pipeline_response) + deserialized = self._deserialize("ManagedNetworkProvisionStatus", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _provision_managed_network_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork"} # type: ignore + _provision_managed_network_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork" + } + + @overload + async def begin_provision_managed_network( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[_models.ManagedNetworkProvisionOptions] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ManagedNetworkProvisionStatus]: + """Provisions the managed network of a machine learning workspace. + + Provisions the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: Managed Network Provisioning Options for a machine learning workspace. Default + value is None. + :type body: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionOptions + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ManagedNetworkProvisionStatus or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_provision_managed_network( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ManagedNetworkProvisionStatus]: + """Provisions the managed network of a machine learning workspace. + Provisions the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: Managed Network Provisioning Options for a machine learning workspace. Default + value is None. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ManagedNetworkProvisionStatus or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_provision_managed_network( self, resource_group_name: str, workspace_name: str, - body: Optional["_models.ManagedNetworkProvisionOptions"] = None, + body: Optional[Union[_models.ManagedNetworkProvisionOptions, IO]] = None, **kwargs: Any - ) -> AsyncLROPoller["_models.ManagedNetworkProvisionStatus"]: + ) -> AsyncLROPoller[_models.ManagedNetworkProvisionStatus]: """Provisions the managed network of a machine learning workspace. Provisions the managed network of a machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param body: Managed Network Provisioning Options for a machine learning workspace. - :type body: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionOptions + :param body: Managed Network Provisioning Options for a machine learning workspace. Is either a + ManagedNetworkProvisionOptions type or a IO type. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionOptions or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -137,17 +246,17 @@ async def begin_provision_managed_network( result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedNetworkProvisionStatus"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ManagedNetworkProvisionStatus] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._provision_managed_network_initial( resource_group_name=resource_group_name, @@ -155,29 +264,36 @@ async def begin_provision_managed_network( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ManagedNetworkProvisionStatus', pipeline_response) + deserialized = self._deserialize("ManagedNetworkProvisionStatus", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_provision_managed_network.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork"} # type: ignore + begin_provision_managed_network.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_managed_network_settings_rule_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_managed_network_settings_rule_operations.py index b55852ae5767..4a9c7eb9d4b9 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_managed_network_settings_rule_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_managed_network_settings_rule_operations.py @@ -6,97 +6,122 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._managed_network_settings_rule_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._managed_network_settings_rule_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ManagedNetworkSettingsRuleOperations: - """ManagedNetworkSettingsRuleOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class ManagedNetworkSettingsRuleOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. 
- :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`managed_network_settings_rule` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> AsyncIterable["_models.OutboundRuleListResult"]: + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncIterable["_models.OutboundRuleBasicResource"]: """Lists the managed network outbound rules for a machine learning workspace. Lists the managed network outbound rules for a machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OutboundRuleListResult or the result of + :return: An iterator like instance of either OutboundRuleBasicResource or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.OutboundRuleListResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OutboundRuleListResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.OutboundRuleListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = 
build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -107,16 +132,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("OutboundRuleListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -127,78 +151,80 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - rule_name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, rule_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, rule_name=rule_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = 
self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - rule_name: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, workspace_name: str, rule_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes an outbound rule from the managed network of a machine learning workspace. Deletes an outbound rule from the managed network of a machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param rule_name: Name of the workspace managed network outbound rule. + :param rule_name: Name of the workspace managed network outbound rule. Required. :type rule_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -210,94 +236,106 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
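
A minimal sketch of enumerating workspace outbound rules with the paged list operation above; the operation returns an async pager, so it is consumed with `async for`. The client is the one assumed earlier, the names are placeholders, and reading `rule.name` assumes the standard ARM resource envelope:

    async def show_outbound_rules(client) -> None:
        # list() yields OutboundRuleBasicResource items lazily, page by page.
        async for rule in client.managed_network_settings_rule.list(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
        ):
            print(rule.name)
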
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, rule_name=rule_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - rule_name: str, - **kwargs: Any - ) -> "_models.OutboundRuleBasicResource": + self, resource_group_name: str, workspace_name: str, rule_name: str, **kwargs: Any + ) -> _models.OutboundRuleBasicResource: """Gets an outbound rule from the managed network of a machine learning workspace. Gets an outbound rule from the managed network of a machine learning workspace. 
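
A minimal sketch of the remaining outbound-rule operations in this class (get, begin_create_or_update, begin_delete). To avoid guessing at rule model fields, it re-submits a rule fetched from the service instead of constructing one; the client and all names are the same assumptions as in the earlier sketches:

    async def roundtrip_outbound_rule(client) -> None:
        group = client.managed_network_settings_rule

        # Read an existing outbound rule.
        rule = await group.get(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            rule_name="<rule-name>",
        )

        # Re-submit it unchanged; body may be an OutboundRuleBasicResource model or a JSON stream.
        update_poller = await group.begin_create_or_update(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            rule_name="<rule-name>",
            body=rule,
        )
        updated = await update_poller.result()
        print("updated rule:", updated.name)

        # Delete the rule; the poller resolves to None once the LRO completes.
        delete_poller = await group.begin_delete(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            rule_name="<rule-name>",
        )
        await delete_poller.result()
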
:param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param rule_name: Name of the workspace managed network outbound rule. + :param rule_name: Name of the workspace managed network outbound rule. Required. :type rule_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: OutboundRuleBasicResource, or the result of cls(response) + :return: OutboundRuleBasicResource or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OutboundRuleBasicResource"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OutboundRuleBasicResource] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, rule_name=rule_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -305,76 +343,179 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('OutboundRuleBasicResource', pipeline_response) + deserialized = self._deserialize("OutboundRuleBasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } async def _create_or_update_initial( self, resource_group_name: str, workspace_name: str, rule_name: str, - body: "_models.OutboundRuleBasicResource", + body: 
Union[_models.OutboundRuleBasicResource, IO], **kwargs: Any - ) -> Optional["_models.OutboundRuleBasicResource"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OutboundRuleBasicResource"]] + ) -> Optional[_models.OutboundRuleBasicResource]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'OutboundRuleBasicResource') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.OutboundRuleBasicResource]] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "OutboundRuleBasicResource") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, rule_name=rule_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('OutboundRuleBasicResource', pipeline_response) + deserialized = self._deserialize("OutboundRuleBasicResource", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - 
_create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + rule_name: str, + body: _models.OutboundRuleBasicResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OutboundRuleBasicResource]: + """Creates or updates an outbound rule in the managed network of a machine learning workspace. + + Creates or updates an outbound rule in the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param rule_name: Name of the workspace managed network outbound rule. Required. + :type rule_name: str + :param body: Outbound Rule to be created or updated in the managed network of a machine + learning workspace. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OutboundRuleBasicResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + rule_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OutboundRuleBasicResource]: + """Creates or updates an outbound rule in the managed network of a machine learning workspace. + + Creates or updates an outbound rule in the managed network of a machine learning workspace. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param rule_name: Name of the workspace managed network outbound rule. Required. + :type rule_name: str + :param body: Outbound Rule to be created or updated in the managed network of a machine + learning workspace. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OutboundRuleBasicResource or the + result of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -382,22 +523,26 @@ async def begin_create_or_update( resource_group_name: str, workspace_name: str, rule_name: str, - body: "_models.OutboundRuleBasicResource", + body: Union[_models.OutboundRuleBasicResource, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.OutboundRuleBasicResource"]: + ) -> AsyncLROPoller[_models.OutboundRuleBasicResource]: """Creates or updates an outbound rule in the managed network of a machine learning workspace. Creates or updates an outbound rule in the managed network of a machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param rule_name: Name of the workspace managed network outbound rule. + :param rule_name: Name of the workspace managed network outbound rule. Required. :type rule_name: str :param body: Outbound Rule to be created or updated in the managed network of a machine - learning workspace. - :type body: ~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource + learning workspace. Is either a OutboundRuleBasicResource type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -410,17 +555,17 @@ async def begin_create_or_update( result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.OutboundRuleBasicResource"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OutboundRuleBasicResource] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -429,29 +574,36 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('OutboundRuleBasicResource', pipeline_response) + deserialized = self._deserialize("OutboundRuleBasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}"} # type: ignore + begin_create_or_update.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_model_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_model_containers_operations.py index 8fef60838663..f571872199c0 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_model_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_model_containers_operations.py @@ -6,44 +6,58 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request -from ...operations._model_containers_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._model_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ModelContainersOperations: - """ModelContainersOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class ModelContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`model_containers` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -52,64 +66,75 @@ def list( workspace_name: str, skip: Optional[str] = None, count: Optional[int] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, **kwargs: Any - ) -> AsyncIterable["_models.ModelContainerResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.ModelContainer"]: """List model containers. List model containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param count: Maximum number of results to return. + :param count: Maximum number of results to return. Default value is None. :type count: int - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ModelContainerResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either ModelContainer or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ModelContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, count=count, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - count=count, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -120,16 +145,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ModelContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + 
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -140,60 +164,64 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any ) -> None: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -204,57 +232,62 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any - ) -> "_models.ModelContainer": + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.ModelContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ModelContainer, or the result of cls(response) + :return: ModelContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -262,15 +295,82 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ModelContainer', pipeline_response) + deserialized = self._deserialize("ModelContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}" + } + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ModelContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def create_or_update( @@ -278,55 +378,75 @@ async def create_or_update( resource_group_name: str, workspace_name: str, name: str, - body: "_models.ModelContainer", + body: Union[_models.ModelContainer, IO], **kwargs: Any - ) -> "_models.ModelContainer": + ) -> _models.ModelContainer: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. 
:type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer + :param body: Container entity to create or update. Is either a ModelContainer type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ModelContainer, or the result of cls(response) + :return: ModelContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ModelContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ModelContainer] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelContainer") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -335,15 +455,16 @@ async def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('ModelContainer', pipeline_response) + deserialized = self._deserialize("ModelContainer", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('ModelContainer', pipeline_response) + deserialized = self._deserialize("ModelContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) - - 
return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_model_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_model_versions_operations.py index d314037266ce..efb77f0b25f4 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_model_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_model_versions_operations.py @@ -6,46 +6,61 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._model_versions_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request, build_package_request_initial -T = TypeVar('T') +from ...operations._model_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, + build_package_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ModelVersionsOperations: - """ModelVersionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class ModelVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. 
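# --- editor's note (not part of the generated patch) ----------------------
# Companion sketch for the regenerated ModelContainersOperations above.  The
# `model_containers` attribute name comes from the class docstring in this
# patch; the client construction mirrors the previous note and is otherwise
# an assumption.
import asyncio

from azure.identity.aio import DefaultAzureCredential

from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices


async def show_model_containers() -> None:
    credential = DefaultAzureCredential()
    client = AzureMachineLearningServices(credential=credential, subscription_id="<subscription-id>")

    # list() is synchronous and returns an AsyncItemPaged; the regenerated
    # pager re-issues next-link requests with the client's api-version.
    async for container in client.model_containers.list(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        list_view_type="ActiveOnly",
    ):
        print(container.name)

    # get() is a coroutine returning a ModelContainer; create_or_update()
    # now also accepts a raw JSON body (IO/bytes) in addition to the model.
    container = await client.model_containers.get(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        name="my-model",
    )
    print(container.id)

    await client.close()
    await credential.close()


if __name__ == "__main__":
    asyncio.run(show_model_containers())
# --------------------------------------------------------------------------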
- :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`model_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -62,67 +77,74 @@ def list( tags: Optional[str] = None, properties: Optional[str] = None, feed: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, stage: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.ModelVersionResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.ModelVersion"]: """List model versions. List model versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Model name. This is case-sensitive. + :param name: Model name. This is case-sensitive. Required. :type name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param version: Model version. + :param version: Model version. Default value is None. :type version: str - :param description: Model description. + :param description: Model description. Default value is None. :type description: str - :param offset: Number of initial results to skip. + :param offset: Number of initial results to skip. Default value is None. :type offset: int :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param properties: Comma-separated list of property names (and optionally values). Example: - prop1,prop2=value2. + prop1,prop2=value2. Default value is None. :type properties: str - :param feed: Name of the feed. + :param feed: Name of the feed. Default value is None. :type feed: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param stage: Model stage. + :param stage: Model stage. Default value is None. 
:type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ModelVersionResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either ModelVersion or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, order_by=order_by, top=top, @@ -134,31 +156,26 @@ def prepare_request(next_link=None): feed=feed, list_view_type=list_view_type, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - name=name, - api_version=api_version, - skip=skip, - order_by=order_by, - top=top, - version=version, - description=description, - offset=offset, - tags=tags, - properties=properties, - feed=feed, - list_view_type=list_view_type, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -169,16 +186,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ModelVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = 
prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -189,64 +205,67 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any ) -> None: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. 
:type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -257,61 +276,65 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - version: str, - **kwargs: Any - ) -> "_models.ModelVersion": + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.ModelVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. 
:type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ModelVersion, or the result of cls(response) + :return: ModelVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -319,15 +342,88 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ModelVersion', pipeline_response) + deserialized = self._deserialize("ModelVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}" + } + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.ModelVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. 
+ :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelVersion: + """Create or update version. + + Create or update version. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def create_or_update( @@ -336,58 +432,78 @@ async def create_or_update( workspace_name: str, name: str, version: str, - body: "_models.ModelVersion", + body: Union[_models.ModelVersion, IO], **kwargs: Any - ) -> "_models.ModelVersion": + ) -> _models.ModelVersion: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion + :param body: Version entity to create or update. Is either a ModelVersion type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
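# --- editor's note (not part of the generated patch) ----------------------
# Sketch of the regenerated model_versions.create_or_update, whose body is
# now Union[ModelVersion, IO] (see the signature change above).  The import
# path and client constructor are assumptions carried over from the earlier
# notes, and the JSON property names in the payload are illustrative only.
import asyncio
import json

from azure.identity.aio import DefaultAzureCredential

from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices


async def register_model_version() -> None:
    credential = DefaultAzureCredential()
    client = AzureMachineLearningServices(credential=credential, subscription_id="<subscription-id>")

    # Raw JSON bytes take the new `_content` path; a ModelVersion model would
    # instead be serialized via self._serialize.body(body, "ModelVersion").
    payload = json.dumps(
        {"properties": {"modelUri": "azureml://datastores/workspaceblobstore/paths/models/my-model"}}
    ).encode("utf-8")

    version = await client.model_versions.create_or_update(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        name="my-model",
        version="1",
        body=payload,
        content_type="application/json",
    )
    print(version.id)

    await client.close()
    await credential.close()


if __name__ == "__main__":
    asyncio.run(register_model_version())
# --------------------------------------------------------------------------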
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ModelVersion, or the result of cls(response) + :return: ModelVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ModelVersion] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'ModelVersion') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelVersion") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -396,18 +512,19 @@ async def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('ModelVersion', pipeline_response) + deserialized = self._deserialize("ModelVersion", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('ModelVersion', pipeline_response) + deserialized = self._deserialize("ModelVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}" + } async def _package_initial( self, @@ -415,62 +532,168 @@ async def _package_initial( workspace_name: str, name: str, version: str, - body: "_models.PackageRequest", + body: Union[_models.PackageRequest, IO], **kwargs: Any - ) -> Optional["_models.PackageResponse"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.PackageResponse"]] + ) -> Optional[_models.PackageResponse]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PackageRequest') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.PackageResponse]] = kwargs.pop("cls", None) - request = build_package_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PackageRequest") + + request = build_package_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._package_initial.metadata['url'], + content=_content, + template_url=self._package_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('PackageResponse', pipeline_response) + deserialized = self._deserialize("PackageResponse", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", 
response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _package_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package"} # type: ignore + _package_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package" + } + @overload + async def begin_package( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.PackageRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_package( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
+ Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_package( @@ -479,23 +702,28 @@ async def begin_package( workspace_name: str, name: str, version: str, - body: "_models.PackageRequest", + body: Union[_models.PackageRequest, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.PackageResponse"]: + ) -> AsyncLROPoller[_models.PackageResponse]: """Model Version Package operation. Model Version Package operation. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Package operation request body. - :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest + :param body: Package operation request body. Is either a PackageRequest type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -508,17 +736,17 @@ async def begin_package( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.PackageResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PackageResponse] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._package_initial( resource_group_name=resource_group_name, @@ -528,29 +756,36 @@ async def begin_package( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('PackageResponse', pipeline_response) + deserialized = self._deserialize("PackageResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_package.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package"} # type: ignore + begin_package.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package" + } diff --git 
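A sketch of driving the long-running packaging operation above. It deliberately exercises the new raw-payload overload so no PackageRequest field names have to be assumed here; `client` is an AzureMachineLearningServices aio client constructed as in the earlier sketch, and the payload bytes are a placeholder rather than a schema taken from this patch.

async def package_model_version(client, resource_group: str, workspace: str) -> None:
    # Placeholder wire payload; the real shape is defined by PackageRequest in the
    # 2023-08-01-preview swagger and is not spelled out in this sketch.
    raw_body = b'{"...": "..."}'
    poller = await client.model_versions.begin_package(
        resource_group_name=resource_group,
        workspace_name=workspace,
        name="my-model",
        version="1",
        body=raw_body,                    # bytes/IO branch -> sent as `content`
        content_type="application/json",
    )
    package_response = await poller.result()  # PackageResponse once the LRO completes
    print(package_response)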
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_online_deployments_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_online_deployments_operations.py index 877796a6e655..cd78659b17b5 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_online_deployments_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_online_deployments_operations.py @@ -6,46 +6,63 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._online_deployments_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_logs_request, build_get_request, build_list_request, build_list_skus_request, build_update_request_initial -T = TypeVar('T') +from ...operations._online_deployments_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_logs_request, + build_get_request, + build_list_request, + build_list_skus_request, + build_update_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class OnlineDeploymentsOperations: - """OnlineDeploymentsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class OnlineDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`online_deployments` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -57,66 +74,75 @@ def list( top: Optional[int] = None, skip: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.OnlineDeploymentTrackedResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.OnlineDeployment"]: """List Inference Endpoint Deployments. List Inference Endpoint Deployments. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Top of list. + :param top: Top of list. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OnlineDeploymentTrackedResourceArmPaginatedResult - or the result of cls(response) + :return: An iterator like instance of either OnlineDeployment or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.OnlineDeploymentTrackedResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OnlineDeploymentTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineDeploymentTrackedResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, 
top=top, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - endpoint_name=endpoint_name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -127,16 +153,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("OnlineDeploymentTrackedResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -147,85 +172,86 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - deployment_name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", 
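A sketch of consuming the pager returned by list() above: the call is not awaited (it returns an AsyncItemPaged), and iteration yields individual OnlineDeployment resources even though the service pages them as OnlineDeploymentTrackedResourceArmPaginatedResult. `client` is assumed to be the same aio client as in the earlier sketches.

async def show_deployments(client, resource_group: str, workspace: str, endpoint: str) -> None:
    pager = client.online_deployments.list(  # returns AsyncItemPaged, no await here
        resource_group_name=resource_group,
        workspace_name=workspace,
        endpoint_name=endpoint,
        top=10,
    )
    async for deployment in pager:
        print(deployment.name, deployment.properties.provisioning_state)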
_params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - deployment_name: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete Inference Endpoint Deployment (asynchronous). Delete Inference Endpoint Deployment (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. 
:type endpoint_name: str - :param deployment_name: Inference Endpoint Deployment name. + :param deployment_name: Inference Endpoint Deployment name. Required. :type deployment_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -237,99 +263,110 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + begin_delete.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - deployment_name: str, - **kwargs: Any - ) -> "_models.OnlineDeployment": + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> _models.OnlineDeployment: """Get Inference Deployment Deployment. Get Inference Deployment Deployment. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param deployment_name: Inference Endpoint Deployment name. + :param deployment_name: Inference Endpoint Deployment name. Required. :type deployment_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: OnlineDeployment, or the result of cls(response) + :return: OnlineDeployment or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.OnlineDeployment - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineDeployment"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OnlineDeployment] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -337,15 +374,16 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('OnlineDeployment', pipeline_response) + deserialized = 
self._deserialize("OnlineDeployment", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}" + } async def _update_initial( self, @@ -353,63 +391,171 @@ async def _update_initial( workspace_name: str, endpoint_name: str, deployment_name: str, - body: "_models.PartialMinimalTrackedResourceWithSku", + body: Union[_models.PartialMinimalTrackedResourceWithSku, IO], **kwargs: Any - ) -> Optional["_models.OnlineDeployment"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OnlineDeployment"]] + ) -> Optional[_models.OnlineDeployment]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PartialMinimalTrackedResourceWithSku') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.OnlineDeployment]] = kwargs.pop("cls", None) - request = build_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithSku") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._update_initial.metadata['url'], + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('OnlineDeployment', pipeline_response) + deserialized = self._deserialize("OnlineDeployment", pipeline_response) if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}" + } + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialMinimalTrackedResourceWithSku, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineDeployment]: + """Update Online Deployment (asynchronous). + + Update Online Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineDeployment]: + """Update Online Deployment (asynchronous). + + Update Online Deployment (asynchronous). + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_update( @@ -418,23 +564,29 @@ async def begin_update( workspace_name: str, endpoint_name: str, deployment_name: str, - body: "_models.PartialMinimalTrackedResourceWithSku", + body: Union[_models.PartialMinimalTrackedResourceWithSku, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.OnlineDeployment"]: + ) -> AsyncLROPoller[_models.OnlineDeployment]: """Update Online Deployment (asynchronous). Update Online Deployment (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str - :param deployment_name: Inference Endpoint Deployment name. + :param deployment_name: Inference Endpoint Deployment name. Required. :type deployment_name: str - :param body: Online Endpoint entity to apply during operation. - :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku + :param body: Online Endpoint entity to apply during operation. Is either a + PartialMinimalTrackedResourceWithSku type or a IO type. 
Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -447,17 +599,17 @@ async def begin_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineDeployment"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OnlineDeployment] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._update_initial( resource_group_name=resource_group_name, @@ -467,32 +619,37 @@ async def begin_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('OnlineDeployment', pipeline_response) + deserialized = self._deserialize("OnlineDeployment", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}" + } async def _create_or_update_initial( self, @@ -500,62 +657,173 @@ async def _create_or_update_initial( workspace_name: str, endpoint_name: str, deployment_name: str, - body: "_models.OnlineDeployment", + body: Union[_models.OnlineDeployment, IO], **kwargs: Any - ) -> "_models.OnlineDeployment": - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineDeployment"] + ) -> _models.OnlineDeployment: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'OnlineDeployment') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OnlineDeployment] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "OnlineDeployment") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('OnlineDeployment', pipeline_response) + deserialized = 
self._deserialize("OnlineDeployment", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('OnlineDeployment', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("OnlineDeployment", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.OnlineDeployment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineDeployment]: + """Create or update Inference Endpoint Deployment (asynchronous). + + Create or update Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Inference Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineDeployment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineDeployment]: + """Create or update Inference Endpoint Deployment (asynchronous). + Create or update Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Inference Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -564,23 +832,28 @@ async def begin_create_or_update( workspace_name: str, endpoint_name: str, deployment_name: str, - body: "_models.OnlineDeployment", + body: Union[_models.OnlineDeployment, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.OnlineDeployment"]: + ) -> AsyncLROPoller[_models.OnlineDeployment]: """Create or update Inference Endpoint Deployment (asynchronous). Create or update Inference Endpoint Deployment (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param deployment_name: Inference Endpoint Deployment name. 
+ :param deployment_name: Inference Endpoint Deployment name. Required. :type deployment_name: str - :param body: Inference Endpoint entity to apply during operation. - :type body: ~azure.mgmt.machinelearningservices.models.OnlineDeployment + :param body: Inference Endpoint entity to apply during operation. Is either a OnlineDeployment + type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineDeployment or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -593,17 +866,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineDeployment"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OnlineDeployment] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -613,92 +886,192 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('OnlineDeployment', pipeline_response) + deserialized = self._deserialize("OnlineDeployment", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( 
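A sketch of the new raw-payload path through begin_create_or_update above: a bytes or file-like body is passed through as `content` instead of being serialized from an OnlineDeployment model. The payload contents are left to the caller and are not defined by this patch.

async def create_or_update_from_json(
    client, resource_group: str, workspace: str, endpoint: str, deployment: str, payload: bytes
):
    poller = await client.online_deployments.begin_create_or_update(
        resource_group_name=resource_group,
        workspace_name=workspace,
        endpoint_name=endpoint,
        deployment_name=deployment,
        body=payload,                     # bytes/IO hits the isinstance(body, (IOBase, bytes)) branch
        content_type="application/json",
    )
    return await poller.result()          # OnlineDeployment on success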
polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}" + } - @distributed_trace_async + @overload async def get_logs( self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, - body: "_models.DeploymentLogsRequest", + body: _models.DeploymentLogsRequest, + *, + content_type: str = "application/json", **kwargs: Any - ) -> "_models.DeploymentLogs": + ) -> _models.DeploymentLogs: """Polls an Endpoint operation. Polls an Endpoint operation. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param deployment_name: The name and identifier for the endpoint. + :param deployment_name: The name and identifier for the endpoint. Required. :type deployment_name: str - :param body: The request containing parameters for retrieving logs. + :param body: The request containing parameters for retrieving logs. Required. :type body: ~azure.mgmt.machinelearningservices.models.DeploymentLogsRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentLogs or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def get_logs( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DeploymentLogs: + """Polls an Endpoint operation. + + Polls an Endpoint operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The name and identifier for the endpoint. Required. + :type deployment_name: str + :param body: The request containing parameters for retrieving logs. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
+ Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DeploymentLogs, or the result of cls(response) + :return: DeploymentLogs or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def get_logs( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.DeploymentLogsRequest, IO], + **kwargs: Any + ) -> _models.DeploymentLogs: + """Polls an Endpoint operation. + + Polls an Endpoint operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The name and identifier for the endpoint. Required. + :type deployment_name: str + :param body: The request containing parameters for retrieving logs. Is either a + DeploymentLogsRequest type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DeploymentLogsRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentLogs or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DeploymentLogs"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DeploymentLogs] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'DeploymentLogsRequest') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DeploymentLogsRequest") request = build_get_logs_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.get_logs.metadata['url'], + content=_content, + template_url=self.get_logs.metadata["url"], + headers=_headers, + 
params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -706,15 +1079,16 @@ async def get_logs( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('DeploymentLogs', pipeline_response) + deserialized = self._deserialize("DeploymentLogs", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_logs.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs"} # type: ignore - + get_logs.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs" + } @distributed_trace def list_skus( @@ -726,66 +1100,75 @@ def list_skus( count: Optional[int] = None, skip: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.SkuResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.SkuResource"]: """List Inference Endpoint Deployment Skus. List Inference Endpoint Deployment Skus. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param deployment_name: Inference Endpoint Deployment name. + :param deployment_name: Inference Endpoint Deployment name. Required. :type deployment_name: str - :param count: Number of Skus to be retrieved in a page of results. + :param count: Number of Skus to be retrieved in a page of results. Default value is None. :type count: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. 
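# --- Illustrative usage sketch (not generated code) ---------------------------
# Fetching deployment logs and paging through SKUs with the operations shown
# above. `client` is assumed to be the aio AzureMachineLearningServices client
# from the earlier sketch; the DeploymentLogsRequest fields used here
# (container_type, tail) are assumptions, not taken from this diff.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

async def inspect_deployment(client, rg, ws, endpoint_name, deployment_name):
    logs = await client.online_deployments.get_logs(
        resource_group_name=rg,
        workspace_name=ws,
        endpoint_name=endpoint_name,
        deployment_name=deployment_name,
        body=_models.DeploymentLogsRequest(container_type="InferenceServer", tail=100),
    )
    # list_skus returns an AsyncItemPaged; iterate it with `async for` (no await).
    skus = [
        sku
        async for sku in client.online_deployments.list_skus(
            rg, ws, endpoint_name, deployment_name, count=10
        )
    ]
    return logs, skus
# ------------------------------------------------------------------------------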
:type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either SkuResourceArmPaginatedResult or the result of - cls(response) + :return: An iterator like instance of either SkuResource or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.SkuResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.SkuResource] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.SkuResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.SkuResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_skus_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, - api_version=api_version, + subscription_id=self._config.subscription_id, count=count, skip=skip, - template_url=self.list_skus.metadata['url'], + api_version=api_version, + template_url=self.list_skus.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_skus_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - endpoint_name=endpoint_name, - deployment_name=deployment_name, - api_version=api_version, - count=count, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -796,16 +1179,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("SkuResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -816,8 +1198,8 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_skus.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/skus"} # type: ignore + list_skus.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/skus" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_online_endpoints_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_online_endpoints_operations.py index b95fc31a9339..1bc775592f2e 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_online_endpoints_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_online_endpoints_operations.py @@ -6,46 +6,64 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models from ..._vendor import _convert_request -from ...operations._online_endpoints_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_get_token_request, build_list_keys_request, build_list_request, build_regenerate_keys_request_initial, build_update_request_initial -T = TypeVar('T') +from ...operations._online_endpoints_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_get_token_request, + build_list_keys_request, + build_list_request, + build_regenerate_keys_request, + build_update_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class OnlineEndpointsOperations: - """OnlineEndpointsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class OnlineEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`online_endpoints` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -54,60 +72,69 @@ def list( workspace_name: str, name: Optional[str] = None, count: Optional[int] = None, - compute_type: Optional[Union[str, "_models.EndpointComputeType"]] = None, + compute_type: Optional[Union[str, _models.EndpointComputeType]] = None, skip: Optional[str] = None, tags: Optional[str] = None, properties: Optional[str] = None, - order_by: Optional[Union[str, "_models.OrderString"]] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, **kwargs: Any - ) -> AsyncIterable["_models.OnlineEndpointTrackedResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.OnlineEndpoint"]: """List Online Endpoints. List Online Endpoints. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Name of the endpoint. + :param name: Name of the endpoint. Default value is None. :type name: str - :param count: Number of endpoints to be retrieved in a page of results. + :param count: Number of endpoints to be retrieved in a page of results. Default value is None. 
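# --- Illustrative usage sketch (not generated code) ---------------------------
# Paging through Online Endpoints with the filter parameters documented on the
# list operation below. `client` is assumed to be the aio
# AzureMachineLearningServices client from the earlier sketch; filter values
# are placeholders.
async def list_managed_endpoints(client, rg, ws):
    names = []
    async for endpoint in client.online_endpoints.list(
        resource_group_name=rg,
        workspace_name=ws,
        compute_type="Managed",       # known values: "Managed", "Kubernetes", "AzureMLCompute"
        order_by="CreatedAtDesc",     # or an _models.OrderString value
        tags="stage,env=prod",        # comma separated "key" or "key=value" filters
    ):
        names.append(endpoint.name)
    return names
# ------------------------------------------------------------------------------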
:type count: int - :param compute_type: EndpointComputeType to be filtered by. + :param compute_type: EndpointComputeType to be filtered by. Known values are: "Managed", + "Kubernetes", and "AzureMLCompute". Default value is None. :type compute_type: str or ~azure.mgmt.machinelearningservices.models.EndpointComputeType - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: A set of tags with which to filter the returned models. It is a comma separated - string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 . + string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 . Default value + is None. :type tags: str :param properties: A set of properties with which to filter the returned models. It is a comma separated string of properties key and/or properties key=value Example: - propKey1,propKey2,propKey3=value3 . + propKey1,propKey2,propKey3=value3 . Default value is None. :type properties: str - :param order_by: The option to order the response. + :param order_by: The option to order the response. Known values are: "CreatedAtDesc", + "CreatedAtAsc", "UpdatedAtDesc", and "UpdatedAtAsc". Default value is None. :type order_by: str or ~azure.mgmt.machinelearningservices.models.OrderString :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OnlineEndpointTrackedResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either OnlineEndpoint or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.OnlineEndpointTrackedResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OnlineEndpointTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineEndpointTrackedResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, name=name, count=count, compute_type=compute_type, @@ -115,26 +142,26 @@ def prepare_request(next_link=None): tags=tags, properties=properties, order_by=order_by, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = 
build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - name=name, - count=count, - compute_type=compute_type, - skip=skip, - tags=tags, - properties=properties, - order_by=order_by, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -145,16 +172,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("OnlineEndpointTrackedResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -165,80 +191,83 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - 
template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete Online Endpoint (asynchronous). Delete Online Endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -250,94 +279,106 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
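# --- Illustrative usage sketch (not generated code) ---------------------------
# Deleting an Online Endpoint via the begin_delete LRO above and awaiting its
# completion. `client` is assumed to be the aio AzureMachineLearningServices
# client from the earlier sketch.
async def delete_endpoint(client, rg, ws, endpoint_name):
    poller = await client.online_endpoints.begin_delete(
        resource_group_name=rg,
        workspace_name=ws,
        endpoint_name=endpoint_name,
    )
    await poller.result()  # resolves to None once the deletion LRO completes
# ------------------------------------------------------------------------------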
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - **kwargs: Any - ) -> "_models.OnlineEndpoint": + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.OnlineEndpoint: """Get Online Endpoint. Get Online Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: OnlineEndpoint, or the result of cls(response) + :return: OnlineEndpoint or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineEndpoint"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OnlineEndpoint] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -345,77 +386,181 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}" + } async def _update_initial( self, resource_group_name: str, workspace_name: str, endpoint_name: str, - body: "_models.PartialMinimalTrackedResourceWithIdentity", + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], **kwargs: Any - ) -> Optional["_models.OnlineEndpoint"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OnlineEndpoint"]] + ) -> 
Optional[_models.OnlineEndpoint]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PartialMinimalTrackedResourceWithIdentity') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.OnlineEndpoint]] = kwargs.pop("cls", None) - request = build_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithIdentity") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._update_initial.metadata['url'], + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - 
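# --- Illustrative usage sketch (not generated code) ---------------------------
# Reading an Online Endpoint and patching its tags through the update LRO
# defined above. `client` is assumed to be the aio AzureMachineLearningServices
# client from the earlier sketch; passing only `tags` on the partial-resource
# body is an assumption about that model's fields.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

async def retag_endpoint(client, rg, ws, endpoint_name):
    current = await client.online_endpoints.get(rg, ws, endpoint_name)
    poller = await client.online_endpoints.begin_update(
        resource_group_name=rg,
        workspace_name=ws,
        endpoint_name=endpoint_name,
        body=_models.PartialMinimalTrackedResourceWithIdentity(tags={"stage": "test"}),
    )
    updated = await poller.result()
    return current, updated
# ------------------------------------------------------------------------------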
_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}" + } + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineEndpoint]: + """Update Online Endpoint (asynchronous). + + Update Online Endpoint (asynchronous). + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineEndpoint]: + """Update Online Endpoint (asynchronous). + + Update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_update( @@ -423,22 +568,27 @@ async def begin_update( resource_group_name: str, workspace_name: str, endpoint_name: str, - body: "_models.PartialMinimalTrackedResourceWithIdentity", + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.OnlineEndpoint"]: + ) -> AsyncLROPoller[_models.OnlineEndpoint]: """Update Online Endpoint (asynchronous). Update Online Endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str - :param body: Online Endpoint entity to apply during operation. + :param body: Online Endpoint entity to apply during operation. Is either a + PartialMinimalTrackedResourceWithIdentity type or a IO type. Required. :type body: - ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -451,17 +601,17 @@ async def begin_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineEndpoint"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OnlineEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._update_initial( resource_group_name=resource_group_name, @@ -470,93 +620,203 @@ async def begin_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}" + } async def _create_or_update_initial( self, resource_group_name: str, workspace_name: str, endpoint_name: str, - body: "_models.OnlineEndpoint", + body: Union[_models.OnlineEndpoint, IO], 
**kwargs: Any - ) -> "_models.OnlineEndpoint": - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineEndpoint"] + ) -> _models.OnlineEndpoint: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'OnlineEndpoint') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OnlineEndpoint] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "OnlineEndpoint") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) if cls: - 
return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.OnlineEndpoint, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineEndpoint]: + """Create or update Online Endpoint (asynchronous). - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + Create or update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.OnlineEndpoint]: + """Create or update Online Endpoint (asynchronous). + + Create or update Online Endpoint (asynchronous). + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
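# --- Illustrative usage sketch (not generated code) ---------------------------
# Creating/updating an Online Endpoint and saving the poller state so the LRO
# can be resumed later via the `continuation_token` keyword documented on
# begin_create_or_update. `client` is assumed to be the aio
# AzureMachineLearningServices client from the earlier sketch and
# `endpoint_entity` a prepared _models.OnlineEndpoint (or an IO/bytes payload).
async def create_endpoint(client, rg, ws, endpoint_name, endpoint_entity):
    poller = await client.online_endpoints.begin_create_or_update(
        resource_group_name=rg,
        workspace_name=ws,
        endpoint_name=endpoint_name,
        body=endpoint_entity,
    )
    token = poller.continuation_token()  # persist this to resume the LRO later
    resumed = await client.online_endpoints.begin_create_or_update(
        rg, ws, endpoint_name, endpoint_entity, continuation_token=token
    )
    return await resumed.result()
# ------------------------------------------------------------------------------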
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -564,21 +824,26 @@ async def begin_create_or_update( resource_group_name: str, workspace_name: str, endpoint_name: str, - body: "_models.OnlineEndpoint", + body: Union[_models.OnlineEndpoint, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.OnlineEndpoint"]: + ) -> AsyncLROPoller[_models.OnlineEndpoint]: """Create or update Online Endpoint (asynchronous). Create or update Online Endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str - :param body: Online Endpoint entity to apply during operation. - :type body: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint + :param body: Online Endpoint entity to apply during operation. Is either a OnlineEndpoint type + or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -591,17 +856,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineEndpoint"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OnlineEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -610,81 +875,93 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}" + } @distributed_trace_async async def 
list_keys( - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - **kwargs: Any - ) -> "_models.EndpointAuthKeys": + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.EndpointAuthKeys: """List EndpointAuthKeys for an Endpoint using Key-based authentication. List EndpointAuthKeys for an Endpoint using Key-based authentication. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EndpointAuthKeys, or the result of cls(response) + :return: EndpointAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EndpointAuthKeys"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EndpointAuthKeys] = kwargs.pop("cls", None) - request = build_list_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_keys.metadata['url'], + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -692,78 +969,175 @@ async def list_keys( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EndpointAuthKeys', pipeline_response) + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys"} # type: ignore - + list_keys.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys" + } async def _regenerate_keys_initial( # pylint: disable=inconsistent-return-statements self, resource_group_name: str, workspace_name: str, endpoint_name: str, - body: "_models.RegenerateEndpointKeysRequest", + body: Union[_models.RegenerateEndpointKeysRequest, IO], **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'RegenerateEndpointKeysRequest') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_regenerate_keys_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "RegenerateEndpointKeysRequest") + + request = build_regenerate_keys_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._regenerate_keys_initial.metadata['url'], + content=_content, + template_url=self._regenerate_keys_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - 
_regenerate_keys_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys"} # type: ignore + _regenerate_keys_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys" + } + + @overload + async def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.RegenerateEndpointKeysRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: RegenerateKeys request . Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: RegenerateKeys request . Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
+ :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async - async def begin_regenerate_keys( # pylint: disable=inconsistent-return-statements + async def begin_regenerate_keys( self, resource_group_name: str, workspace_name: str, endpoint_name: str, - body: "_models.RegenerateEndpointKeysRequest", + body: Union[_models.RegenerateEndpointKeysRequest, IO], **kwargs: Any ) -> AsyncLROPoller[None]: """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). @@ -771,13 +1145,18 @@ async def begin_regenerate_keys( # pylint: disable=inconsistent-return-statemen Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str - :param body: RegenerateKeys request . - :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest + :param body: RegenerateKeys request . Is either a RegenerateEndpointKeysRequest type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -788,97 +1167,109 @@ async def begin_regenerate_keys( # pylint: disable=inconsistent-return-statemen Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._regenerate_keys_initial( + raw_result = await self._regenerate_keys_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_regenerate_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys"} # type: ignore + begin_regenerate_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys" + } @distributed_trace_async async def get_token( - self, - resource_group_name: str, - workspace_name: str, - endpoint_name: str, - **kwargs: Any - 
) -> "_models.EndpointAuthToken": + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.EndpointAuthToken: """Retrieve a valid AML token for an Endpoint using AMLToken-based authentication. Retrieve a valid AML token for an Endpoint using AMLToken-based authentication. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EndpointAuthToken, or the result of cls(response) + :return: EndpointAuthToken or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthToken - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EndpointAuthToken"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EndpointAuthToken] = kwargs.pop("cls", None) - request = build_get_token_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_token.metadata['url'], + template_url=self.get_token.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -886,12 +1277,13 @@ async def get_token( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EndpointAuthToken', pipeline_response) + deserialized = self._deserialize("EndpointAuthToken", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_token.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token"} # type: ignore - + get_token.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_operations.py index 89f5a9b4249b..843103c13b65 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_operations.py @@ -6,82 +6,102 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import urllib.parse from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request from ...operations._operations import build_list_request -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class Operations: - """Operations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`operations` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list( - self, - **kwargs: Any - ) -> AsyncIterable["_models.AmlOperationListResult"]: + def list(self, **kwargs: Any) -> AsyncIterable["_models.Operation"]: """Lists all of the available Azure Machine Learning Workspaces REST API operations. Lists all of the available Azure Machine Learning Workspaces REST API operations. :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either AmlOperationListResult or the result of - cls(response) + :return: An iterator like instance of either Operation or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.AmlOperationListResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Operation] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.AmlOperationListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -89,19 +109,18 @@ def prepare_request(next_link=None): return request async def extract_data(pipeline_response): - deserialized = self._deserialize("AmlOperationListResult", pipeline_response) + deserialized = 
self._deserialize("OperationListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -112,8 +131,6 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/providers/Microsoft.MachineLearningServices/operations"} # type: ignore + list.metadata = {"url": "/providers/Microsoft.MachineLearningServices/operations"} diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_patch.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_private_endpoint_connections_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_private_endpoint_connections_operations.py index 238901748a4a..c850daec4dee 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_private_endpoint_connections_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_private_endpoint_connections_operations.py @@ -6,95 +6,120 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request -from ...operations._private_endpoint_connections_operations import build_create_or_update_request, build_delete_request, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._private_endpoint_connections_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class PrivateEndpointConnectionsOperations: - """PrivateEndpointConnectionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class PrivateEndpointConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`private_endpoint_connections` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> AsyncIterable["_models.PrivateEndpointConnectionListResult"]: + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncIterable["_models.PrivateEndpointConnection"]: """Called by end-users to get all PE connections. Called by end-users to get all PE connections. :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PrivateEndpointConnectionListResult or the result - of cls(response) + :return: An iterator like instance of either PrivateEndpointConnection or the result of + cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionListResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PrivateEndpointConnectionListResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -105,16 +130,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("PrivateEndpointConnectionListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: 
PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -125,60 +149,64 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - private_endpoint_connection_name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any ) -> None: """Called by end-users to delete a PE connection. Called by end-users to delete a PE connection. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = 
pipeline_response.http_response if response.status_code not in [200, 204]: @@ -189,57 +217,62 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - private_endpoint_connection_name: str, - **kwargs: Any - ) -> "_models.PrivateEndpointConnection": + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.PrivateEndpointConnection: """Called by end-users to get a PE connection. Called by end-users to get a PE connection. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateEndpointConnection, or the result of cls(response) + :return: PrivateEndpointConnection or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -247,15 +280,86 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" + } + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + body: _models.PrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. + + Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. + :type private_endpoint_connection_name: str + :param body: PrivateEndpointConnection object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. + + Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. + :type private_endpoint_connection_name: str + :param body: PrivateEndpointConnection object. Required. 
+ :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def create_or_update( @@ -263,9 +367,9 @@ async def create_or_update( resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, - body: "_models.PrivateEndpointConnection", + body: Union[_models.PrivateEndpointConnection, IO], **kwargs: Any - ) -> "_models.PrivateEndpointConnection": + ) -> _models.PrivateEndpointConnection: """Called by end-users to approve or reject a PE connection. This method must validate and forward the call to NRP. @@ -273,47 +377,67 @@ async def create_or_update( This method must validate and forward the call to NRP. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. :type private_endpoint_connection_name: str - :param body: PrivateEndpointConnection object. - :type body: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :param body: PrivateEndpointConnection object. Is either a PrivateEndpointConnection type or a + IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateEndpointConnection, or the result of cls(response) + :return: PrivateEndpointConnection or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PrivateEndpointConnection') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PrivateEndpointConnection") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -321,12 +445,13 @@ async def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore - + create_or_update.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_private_link_resources_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_private_link_resources_operations.py index fc57952d9249..3cfbb928f6fc 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_private_link_resources_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_private_link_resources_operations.py @@ -6,51 +6,56 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import urllib.parse from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request from ...operations._private_link_resources_operations import build_list_request -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class PrivateLinkResourcesOperations: - """PrivateLinkResourcesOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class PrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`private_link_resources` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> AsyncIterable["_models.PrivateLinkResourceListResult"]: + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncIterable["_models.PrivateLinkResource"]: """Called by Client (Portal, CLI, etc) to get available "private link resources" for the workspace. Each "private link resource" is a connection endpoint (IP address) to the resource. @@ -68,44 +73,57 @@ def list( Defined in the "[NRP] Private Endpoint Design" doc, topic "GET API for GroupIds". :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PrivateLinkResourceListResult or the result of - cls(response) + :return: An iterator like instance of either PrivateLinkResource or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PrivateLinkResourceListResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourceListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - 
workspace_name=workspace_name, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -116,16 +134,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -136,8 +153,8 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_quotas_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_quotas_operations.py index cbdcb997b5f3..28c7a97f92d2 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_quotas_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_quotas_operations.py @@ -6,73 +6,138 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request from ...operations._quotas_operations import build_list_request, build_update_request -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class QuotasOperations: - """QuotasOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class QuotasOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`quotas` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace_async + @overload async def update( self, location: str, - parameters: "_models.QuotaUpdateParameters", + parameters: _models.QuotaUpdateParameters, + *, + content_type: str = "application/json", **kwargs: Any - ) -> "_models.UpdateWorkspaceQuotasResult": + ) -> _models.UpdateWorkspaceQuotasResult: """Update quota for each VM family in workspace. - :param location: The location for update quota is queried. + :param location: The location for update quota is queried. Required. :type location: str - :param parameters: Quota update parameters. + :param parameters: Quota update parameters. Required. :type parameters: ~azure.mgmt.machinelearningservices.models.QuotaUpdateParameters + :keyword content_type: Body Parameter content-type. 
Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: UpdateWorkspaceQuotasResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, location: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.UpdateWorkspaceQuotasResult: + """Update quota for each VM family in workspace. + + :param location: The location for update quota is queried. Required. + :type location: str + :param parameters: Quota update parameters. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: UpdateWorkspaceQuotasResult, or the result of cls(response) + :return: UpdateWorkspaceQuotasResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def update( + self, location: str, parameters: Union[_models.QuotaUpdateParameters, IO], **kwargs: Any + ) -> _models.UpdateWorkspaceQuotasResult: + """Update quota for each VM family in workspace. + + :param location: The location for update quota is queried. Required. + :type location: str + :param parameters: Quota update parameters. Is either a QuotaUpdateParameters type or a IO + type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.QuotaUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
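A similar hedged sketch for the quotas group, showing the regenerated update call together with the reworked pager, which now re-issues next-link requests with the client's api-version. Model names follow the public azure-mgmt-machinelearningservices SDK and, like the import path and IDs, are assumptions rather than part of this patch.

# Illustrative only: quota update plus paged listing with the regenerated async client.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices
from azure.ai.ml._restclient.v2023_08_01_preview import models

async def update_and_list_quotas() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            # A QuotaUpdateParameters model is serialized to JSON; a bytes/IO payload
            # would be sent as-is via the new Union[QuotaUpdateParameters, IO] parameter.
            params = models.QuotaUpdateParameters(
                value=[
                    models.QuotaBaseProperties(id="<quota-resource-id>", limit=100, unit="Count")
                ]
            )
            result = await client.quotas.update(location="eastus", parameters=params)
            for item in result.value or []:
                print(item.id, item.status)

            # The pager yields individual quota entries and follows next links itself.
            async for quota in client.quotas.list(location="eastus"):
                print(quota.id, quota.limit)

asyncio.run(update_and_list_quotas())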
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: UpdateWorkspaceQuotasResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.UpdateWorkspaceQuotasResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.UpdateWorkspaceQuotasResult] = kwargs.pop("cls", None) - _json = self._serialize.body(parameters, 'QuotaUpdateParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "QuotaUpdateParameters") request = build_update_request( location=location, @@ -80,16 +145,19 @@ async def update( api_version=api_version, content_type=content_type, json=_json, - template_url=self.update.metadata['url'], + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -97,58 +165,69 @@ async def update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response) + deserialized = self._deserialize("UpdateWorkspaceQuotasResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas"} # type: ignore - + update.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas" + } @distributed_trace - def list( - self, - location: str, - **kwargs: Any - ) -> AsyncIterable["_models.ListWorkspaceQuotas"]: + def list(self, location: str, **kwargs: Any) -> AsyncIterable["_models.ResourceQuota"]: """Gets the currently assigned Workspace Quotas based on VMFamily. - :param location: The location for which resource usage is queried. 
+ :param location: The location for which resource usage is queried. Required. :type location: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response) + :return: An iterator like instance of either ResourceQuota or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ListWorkspaceQuotas] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ResourceQuota] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ListWorkspaceQuotas] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListWorkspaceQuotas"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, location=location, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - location=location, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -159,16 +238,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ListWorkspaceQuotas", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -179,8 +257,8 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, 
extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registries_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registries_operations.py index 67f9f3224f7a..513261907e1f 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registries_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registries_operations.py @@ -6,87 +6,115 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._registries_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_by_subscription_request, build_list_request, build_remove_regions_request_initial, build_update_request -T = TypeVar('T') +from ...operations._registries_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_by_subscription_request, + build_list_request, + build_remove_regions_request, + build_update_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class RegistriesOperations: - """RegistriesOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class RegistriesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
+ Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registries` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list_by_subscription( - self, - **kwargs: Any - ) -> AsyncIterable["_models.RegistryTrackedResourceArmPaginatedResult"]: + def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.Registry"]: """List registries by subscription. List registries by subscription. :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either RegistryTrackedResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either Registry or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.RegistryTrackedResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.RegistryTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.RegistryTrackedResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_by_subscription_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_subscription.metadata['url'], + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_by_subscription_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), 
params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -97,16 +125,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("RegistryTrackedResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -117,57 +144,67 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_by_subscription.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/registries"} # type: ignore + list_by_subscription.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/registries" + } @distributed_trace - def list( - self, - resource_group_name: str, - **kwargs: Any - ) -> AsyncIterable["_models.RegistryTrackedResourceArmPaginatedResult"]: + def list(self, resource_group_name: str, **kwargs: Any) -> AsyncIterable["_models.Registry"]: """List registries. List registries. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either RegistryTrackedResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either Registry or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.RegistryTrackedResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.RegistryTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.RegistryTrackedResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + 
template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -178,16 +215,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("RegistryTrackedResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -198,75 +234,79 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - **kwargs: Any + self, resource_group_name: str, registry_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) 
request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - **kwargs: Any - ) -> AsyncLROPoller[None]: + async def begin_delete(self, resource_group_name: str, registry_name: str, **kwargs: Any) -> AsyncLROPoller[None]: """Delete registry. Delete registry. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -278,89 +318,101 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
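For the registries group, a hedged sketch of how callers would drive the long-running delete documented here; the poller follows the Location header per the "final-state-via": "location" option configured later in this method. Client construction mirrors the earlier sketches and remains an assumption, as do the resource names.

# Illustrative only: awaiting the registry delete LRO with the regenerated async client.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices

async def delete_registry() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            poller = await client.registries.begin_delete(
                resource_group_name="<resource-group>",
                registry_name="<registry-name>",
            )
            await poller.result()  # resolves once the service reports the delete as finished

asyncio.run(delete_registry())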
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}" + } @distributed_trace_async - async def get( - self, - resource_group_name: str, - registry_name: str, - **kwargs: Any - ) -> "_models.Registry": + async def get(self, resource_group_name: str, registry_name: str, **kwargs: Any) -> _models.Registry: """Get registry. Get registry. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. 
:type registry_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Registry, or the result of cls(response) + :return: Registry or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Registry - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Registry"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Registry] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -368,67 +420,152 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Registry', pipeline_response) + deserialized = self._deserialize("Registry", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}" + } + @overload + async def update( + self, + resource_group_name: str, + registry_name: str, + body: _models.PartialRegistryPartialTrackedResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Registry: + """Update tags. + + Update tags. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialRegistryPartialTrackedResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Registry or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Registry + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + registry_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Registry: + """Update tags. + + Update tags. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Registry or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Registry + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def update( self, resource_group_name: str, registry_name: str, - body: "_models.PartialRegistryPartialTrackedResource", + body: Union[_models.PartialRegistryPartialTrackedResource, IO], **kwargs: Any - ) -> "_models.Registry": + ) -> _models.Registry: """Update tags. Update tags. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param body: Details required to create the registry. - :type body: ~azure.mgmt.machinelearningservices.models.PartialRegistryPartialTrackedResource + :param body: Details required to create the registry. Is either a + PartialRegistryPartialTrackedResource type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialRegistryPartialTrackedResource or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Registry, or the result of cls(response) + :return: Registry or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Registry - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Registry"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Registry] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'PartialRegistryPartialTrackedResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialRegistryPartialTrackedResource") request = build_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update.metadata['url'], + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -436,89 +573,110 @@ async def update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Registry', pipeline_response) + deserialized = self._deserialize("Registry", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore - + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}" + } async def _create_or_update_initial( - self, - resource_group_name: str, - registry_name: str, - body: "_models.Registry", - **kwargs: Any - ) -> "_models.Registry": - cls = kwargs.pop('cls', None) # type: ClsType["_models.Registry"] + self, 
resource_group_name: str, registry_name: str, body: Union[_models.Registry, IO], **kwargs: Any + ) -> _models.Registry: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'Registry') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Registry] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Registry") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('Registry', pipeline_response) + deserialized = self._deserialize("Registry", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('Registry', pipeline_response) + deserialized = self._deserialize("Registry", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + return deserialized # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}" + } - @distributed_trace_async + @overload async def begin_create_or_update( self, 
resource_group_name: str, registry_name: str, - body: "_models.Registry", + body: _models.Registry, + *, + content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller["_models.Registry"]: + ) -> AsyncLROPoller[_models.Registry]: """Create or update registry. Create or update registry. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param body: Details required to create the registry. + :param body: Details required to create the registry. Required. :type body: ~azure.mgmt.machinelearningservices.models.Registry + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -530,17 +688,90 @@ async def begin_create_or_update( :return: An instance of AsyncLROPoller that returns either Registry or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Registry] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Registry"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Registry]: + """Create or update registry. + + Create or update registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
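Finally, a hedged sketch of the create-or-update LRO, which after this regeneration accepts either a Registry model (serialized to JSON) or a pre-serialized IO/bytes payload; the poller resolves via the Azure-AsyncOperation header per the option set in the method body. The import path, field values, and names are placeholders, not part of this patch.

# Illustrative only: registry create-or-update with a model body; a bytes/IO body
# would be passed the same way and sent without client-side serialization.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices
from azure.ai.ml._restclient.v2023_08_01_preview import models

async def create_or_update_registry() -> None:
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            body = models.Registry(location="eastus", tags={"team": "ml-platform"})
            poller = await client.registries.begin_create_or_update(
                resource_group_name="<resource-group>",
                registry_name="<registry-name>",
                body=body,
            )
            registry = await poller.result()
            print(registry.id, registry.location)

asyncio.run(create_or_update_registry())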
+ :return: An instance of AsyncLROPoller that returns either Registry or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, resource_group_name: str, registry_name: str, body: Union[_models.Registry, IO], **kwargs: Any + ) -> AsyncLROPoller[_models.Registry]: + """Create or update registry. + + Create or update registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param body: Details required to create the registry. Is either a Registry type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.Registry or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Registry or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Registry] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -548,111 +779,140 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('Registry', pipeline_response) + deserialized = self._deserialize("Registry", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: 
AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}" + } async def _remove_regions_initial( - self, - resource_group_name: str, - registry_name: str, - body: "_models.Registry", - **kwargs: Any - ) -> Optional["_models.Registry"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Registry"]] + self, resource_group_name: str, registry_name: str, body: Union[_models.Registry, IO], **kwargs: Any + ) -> Optional[_models.Registry]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'Registry') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.Registry]] = kwargs.pop("cls", None) - request = build_remove_regions_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Registry") + + request = build_remove_regions_request( resource_group_name=resource_group_name, registry_name=registry_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._remove_regions_initial.metadata['url'], + content=_content, + template_url=self._remove_regions_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = 
pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('Registry', pipeline_response) + deserialized = self._deserialize("Registry", pipeline_response) if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _remove_regions_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/removeRegions"} # type: ignore + _remove_regions_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/removeRegions" + } - - @distributed_trace_async + @overload async def begin_remove_regions( self, resource_group_name: str, registry_name: str, - body: "_models.Registry", + body: _models.Registry, + *, + content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller["_models.Registry"]: + ) -> AsyncLROPoller[_models.Registry]: """Remove regions from registry. Remove regions from registry. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param body: Details required to create the registry. + :param body: Details required to create the registry. Required. :type body: ~azure.mgmt.machinelearningservices.models.Registry + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -664,17 +924,90 @@ async def begin_remove_regions( :return: An instance of AsyncLROPoller that returns either Registry or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Registry] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Registry"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + async def begin_remove_regions( + self, + resource_group_name: str, + registry_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Registry]: + """Remove regions from registry. + + Remove regions from registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Registry or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_remove_regions( + self, resource_group_name: str, registry_name: str, body: Union[_models.Registry, IO], **kwargs: Any + ) -> AsyncLROPoller[_models.Registry]: + """Remove regions from registry. + + Remove regions from registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param body: Details required to create the registry. Is either a Registry type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.Registry or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
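# --- Illustrative sketch (editorial addition, not part of the generated diff).
# --- The IO overload documented above lets the caller supply a pre-serialized payload
# --- instead of a Registry model, with content_type describing that payload. Assumptions:
# --- `client` is an authenticated aio AzureMachineLearningServices instance with a
# --- `registries` operation group; resource names below are placeholders.
import json

async def remove_regions_sketch(client, registry_as_dict):
    raw_body = json.dumps(registry_as_dict).encode("utf-8")   # bytes are routed to `content=`
    poller = await client.registries.begin_remove_regions(
        resource_group_name="my-rg",
        registry_name="my-registry",
        body=raw_body,
        content_type="application/json",    # default when a model or bytes body is given
    )
    return await poller.result()            # polled via the Location header ("final-state-via": "location")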
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Registry or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Registry] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._remove_regions_initial( resource_group_name=resource_group_name, @@ -682,29 +1015,36 @@ async def begin_remove_regions( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('Registry', pipeline_response) + deserialized = self._deserialize("Registry", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_remove_regions.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/removeRegions"} # type: ignore + begin_remove_regions.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/removeRegions" + 
} diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_code_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_code_containers_operations.py index a7976b446a01..f2fd80c61b69 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_code_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_code_containers_operations.py @@ -6,102 +6,125 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._registry_code_containers_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._registry_code_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class RegistryCodeContainersOperations: - """RegistryCodeContainersOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class RegistryCodeContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_code_containers` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name: str, - registry_name: str, - skip: Optional[str] = None, - **kwargs: Any - ) -> AsyncIterable["_models.CodeContainerResourceArmPaginatedResult"]: + self, resource_group_name: str, registry_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.CodeContainer"]: """List containers. List containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either CodeContainerResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either CodeContainer or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - 
api_version=api_version, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -112,16 +135,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("CodeContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -132,80 +154,84 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - code_name: str, - **kwargs: Any + self, resource_group_name: str, registry_name: str, code_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - 
stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - code_name: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, registry_name: str, code_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete Code container. Delete Code container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Container name. + :param code_name: Container name. Required. :type code_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -217,94 +243,107 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
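# --- Illustrative sketch (editorial addition, not part of the generated diff).
# --- begin_delete returns AsyncLROPoller[None]; callers typically just await completion.
# --- Assumptions: `client` is an authenticated aio AzureMachineLearningServices instance
# --- whose `registry_code_containers` attribute is this operation group; names are placeholders.
async def delete_code_container_sketch(client):
    poller = await client.registry_code_containers.begin_delete(
        resource_group_name="my-rg",
        registry_name="my-registry",
        code_name="my-code-container",
    )
    await poller.result()   # returns None on success; raises HttpResponseError on failure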
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - registry_name: str, - code_name: str, - **kwargs: Any - ) -> "_models.CodeContainer": + self, resource_group_name: str, registry_name: str, code_name: str, **kwargs: Any + ) -> _models.CodeContainer: """Get Code container. Get Code container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Container name. + :param code_name: Container name. Required. :type code_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: CodeContainer, or the result of cls(response) + :return: CodeContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -312,76 +351,184 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('CodeContainer', pipeline_response) + deserialized = self._deserialize("CodeContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}" + } async def _create_or_update_initial( self, resource_group_name: str, registry_name: str, code_name: str, - body: "_models.CodeContainer", + body: Union[_models.CodeContainer, IO], **kwargs: Any - ) -> "_models.CodeContainer": - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainer"] + ) -> _models.CodeContainer: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: 
ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'CodeContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CodeContainer] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeContainer") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('CodeContainer', pipeline_response) + deserialized = self._deserialize("CodeContainer", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('CodeContainer', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("CodeContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + body: _models.CodeContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CodeContainer]: + """Create or update Code container. + + Create or update Code container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CodeContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CodeContainer]: + """Create or update Code container. + + Create or update Code container. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CodeContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -389,21 +536,27 @@ async def begin_create_or_update( resource_group_name: str, registry_name: str, code_name: str, - body: "_models.CodeContainer", + body: Union[_models.CodeContainer, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.CodeContainer"]: + ) -> AsyncLROPoller[_models.CodeContainer]: """Create or update Code container. Create or update Code container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Container name. + :param code_name: Container name. Required. :type code_name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer + :param body: Container entity to create or update. Is either a CodeContainer type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -416,17 +569,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.CodeContainer] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainer"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CodeContainer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -435,29 +588,37 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('CodeContainer', pipeline_response) + deserialized = self._deserialize("CodeContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}" + } diff --git 
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_code_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_code_versions_operations.py index b9b13890f351..412afb313eb9 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_code_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_code_versions_operations.py @@ -6,46 +6,61 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._registry_code_versions_operations import build_create_or_get_start_pending_upload_request, build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._registry_code_versions_operations import ( + build_create_or_get_start_pending_upload_request, + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class RegistryCodeVersionsOperations: - """RegistryCodeVersionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class RegistryCodeVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_code_versions` attribute. 
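# --- Illustrative sketch (editorial addition, not part of the generated diff).
# --- As the class docstring above notes, this operation group is reached through the
# --- client's `registry_code_versions` attribute; list() returns an AsyncItemPaged that
# --- is consumed with `async for`. Assumptions: `client` is an authenticated aio
# --- AzureMachineLearningServices instance; names below are placeholders.
async def list_code_versions_sketch(client):
    version_names = []
    async for code_version in client.registry_code_versions.list(
        resource_group_name="my-rg",
        registry_name="my-registry",
        code_name="my-code-container",
        top=10,                      # optional paging hint, per the docstring below
    ):
        version_names.append(code_version.name)
    return version_names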
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -57,66 +72,76 @@ def list( top: Optional[int] = None, skip: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.CodeVersionResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.CodeVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Container name. + :param code_name: Container name. Required. :type code_name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either CodeVersionResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either CodeVersion or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + 
params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - code_name=code_name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -127,16 +152,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("CodeVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -147,85 +171,87 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - code_name: str, - version: str, - **kwargs: Any + self, resource_group_name: str, registry_name: str, code_name: str, version: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, version=version, + 
subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - code_name: str, - version: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, registry_name: str, code_name: str, version: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Container name. + :param code_name: Container name. Required. :type code_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -237,99 +263,111 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, version=version, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - registry_name: str, - code_name: str, - version: str, - **kwargs: Any - ) -> "_models.CodeVersion": + self, resource_group_name: str, registry_name: str, code_name: str, version: str, **kwargs: Any + ) -> _models.CodeVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Container name. + :param code_name: Container name. Required. :type code_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: CodeVersion, or the result of cls(response) + :return: CodeVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -337,15 +375,16 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('CodeVersion', pipeline_response) + deserialized = self._deserialize("CodeVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}" + } async def _create_or_update_initial( self, @@ -353,62 +392,175 @@ async def _create_or_update_initial( registry_name: str, code_name: str, version: str, - body: "_models.CodeVersion", + body: Union[_models.CodeVersion, IO], **kwargs: Any - ) -> "_models.CodeVersion": - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersion"] + ) -> _models.CodeVersion: 
error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'CodeVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CodeVersion] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeVersion") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('CodeVersion', pipeline_response) + deserialized = self._deserialize("CodeVersion", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('CodeVersion', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("CodeVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + 
return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: _models.CodeVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CodeVersion]: + """Create or update version. + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CodeVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.CodeVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either CodeVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -417,23 +569,29 @@ async def begin_create_or_update( registry_name: str, code_name: str, version: str, - body: "_models.CodeVersion", + body: Union[_models.CodeVersion, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.CodeVersion"]: + ) -> AsyncLROPoller[_models.CodeVersion]: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Container name. + :param code_name: Container name. Required. :type code_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion + :param body: Version entity to create or update. Is either a CodeVersion type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -446,17 +604,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.CodeVersion] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersion"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CodeVersion] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -466,92 +624,195 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('CodeVersion', pipeline_response) + deserialized = self._deserialize("CodeVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}" + } - @distributed_trace_async + @overload 
async def create_or_get_start_pending_upload( self, resource_group_name: str, registry_name: str, code_name: str, version: str, - body: "_models.PendingUploadRequestDto", + body: _models.PendingUploadRequestDto, + *, + content_type: str = "application/json", **kwargs: Any - ) -> "_models.PendingUploadResponseDto": + ) -> _models.PendingUploadResponseDto: """Generate a storage location and credential for the client to upload a code asset to. Generate a storage location and credential for the client to upload a code asset to. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Pending upload name. This is case-sensitive. + :param code_name: Pending upload name. This is case-sensitive. Required. :type code_name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Pending upload request object. + :param body: Pending upload request object. Required. :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PendingUploadResponseDto, or the result of cls(response) + :return: PendingUploadResponseDto or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_get_start_pending_upload( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a code asset to. + + Generate a storage location and credential for the client to upload a code asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param code_name: Pending upload name. This is case-sensitive. Required. + :type code_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_get_start_pending_upload( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: Union[_models.PendingUploadRequestDto, IO], + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a code asset to. + + Generate a storage location and credential for the client to upload a code asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param code_name: Pending upload name. This is case-sensitive. Required. + :type code_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Is either a PendingUploadRequestDto type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PendingUploadResponseDto"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PendingUploadRequestDto') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PendingUploadResponseDto] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PendingUploadRequestDto") request = build_create_or_get_start_pending_upload_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_get_start_pending_upload.metadata['url'], + 
content=_content, + template_url=self.create_or_get_start_pending_upload.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -559,12 +820,13 @@ async def create_or_get_start_pending_upload( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PendingUploadResponseDto', pipeline_response) + deserialized = self._deserialize("PendingUploadResponseDto", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_get_start_pending_upload.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}/startPendingUpload"} # type: ignore - + create_or_get_start_pending_upload.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}/startPendingUpload" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_component_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_component_containers_operations.py index 752789358531..8892ef5e9303 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_component_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_component_containers_operations.py @@ -6,102 +6,125 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models from ..._vendor import _convert_request -from ...operations._registry_component_containers_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._registry_component_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class RegistryComponentContainersOperations: - """RegistryComponentContainersOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class RegistryComponentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_component_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name: str, - registry_name: str, - skip: Optional[str] = None, - **kwargs: Any - ) -> AsyncIterable["_models.ComponentContainerResourceArmPaginatedResult"]: + self, resource_group_name: str, registry_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.ComponentContainer"]: """List containers. List containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. 
:type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ComponentContainerResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either ComponentContainer or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComponentContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - api_version=api_version, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -112,16 +135,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ComponentContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) 
response = pipeline_response.http_response @@ -132,80 +154,84 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - component_name: str, - **kwargs: Any + self, resource_group_name: str, registry_name: str, component_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", 
response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - component_name: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, registry_name: str, component_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param component_name: Container name. + :param component_name: Container name. Required. :type component_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -217,94 +243,107 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + 
polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - registry_name: str, - component_name: str, - **kwargs: Any - ) -> "_models.ComponentContainer": + self, resource_group_name: str, registry_name: str, component_name: str, **kwargs: Any + ) -> _models.ComponentContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param component_name: Container name. + :param component_name: Container name. Required. 
:type component_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComponentContainer, or the result of cls(response) + :return: ComponentContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -312,76 +351,184 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ComponentContainer', pipeline_response) + deserialized = self._deserialize("ComponentContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}" + } async def _create_or_update_initial( self, resource_group_name: str, registry_name: str, component_name: str, - body: "_models.ComponentContainer", + body: Union[_models.ComponentContainer, IO], **kwargs: Any - ) -> "_models.ComponentContainer": - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentContainer"] + ) -> _models.ComponentContainer: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or 
{}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ComponentContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComponentContainer] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentContainer") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ComponentContainer', pipeline_response) + deserialized = self._deserialize("ComponentContainer", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('ComponentContainer', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ComponentContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}" + } + + @overload + async def 
begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + body: _models.ComponentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComponentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComponentContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComponentContainer]: + """Create or update container. + + Create or update container. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComponentContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -389,21 +536,27 @@ async def begin_create_or_update( resource_group_name: str, registry_name: str, component_name: str, - body: "_models.ComponentContainer", + body: Union[_models.ComponentContainer, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.ComponentContainer"]: + ) -> AsyncLROPoller[_models.ComponentContainer]: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param component_name: Container name. + :param component_name: Container name. Required. :type component_name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :param body: Container entity to create or update. Is either a ComponentContainer type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -416,17 +569,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComponentContainer] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentContainer"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComponentContainer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -435,29 +588,37 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ComponentContainer', pipeline_response) + deserialized = self._deserialize("ComponentContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}" + } diff --git 
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_component_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_component_versions_operations.py index 295f0c1d0bcc..7f2d6ccc589c 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_component_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_component_versions_operations.py @@ -6,46 +6,60 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._registry_component_versions_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._registry_component_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class RegistryComponentVersionsOperations: - """RegistryComponentVersionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class RegistryComponentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_component_versions` attribute. 
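The class docstring above directs callers to the client attribute instead of constructing the operation group directly. A minimal usage sketch, assuming the regenerated async client is importable as AzureMachineLearningServices from the v2023_08_01_preview aio package and keeps the usual (credential, subscription_id) constructor of generated ARM clients; the placeholder values are illustrative only:

import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices


async def list_component_versions() -> None:
    # The credential and client both support async context management in the
    # generated async clients; close() is called on exit.
    async with DefaultAzureCredential() as credential:
        async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
            # The operation group is reached as an attribute of the client; list()
            # returns an AsyncItemPaged that is consumed with async for.
            async for version in client.registry_component_versions.list(
                resource_group_name="<resource-group>",
                registry_name="<registry-name>",
                component_name="<component-name>",
            ):
                print(version.name)


asyncio.run(list_component_versions())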
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -58,70 +72,79 @@ def list( skip: Optional[str] = None, stage: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.ComponentVersionResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.ComponentVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param component_name: Container name. + :param component_name: Container name. Required. :type component_name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param stage: Component stage. + :param stage: Component stage. Default value is None. :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ComponentVersionResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either ComponentVersion or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComponentVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, - api_version=api_version, + subscription_id=self._config.subscription_id, 
order_by=order_by, top=top, skip=skip, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - component_name=component_name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -132,16 +155,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ComponentVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -152,85 +174,87 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - component_name: str, - version: str, - **kwargs: Any + self, resource_group_name: str, registry_name: str, component_name: str, version: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", 
_params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - component_name: str, - version: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, registry_name: str, component_name: str, version: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param component_name: Container name. + :param component_name: Container name. Required. :type component_name: str - :param version: Version identifier. + :param version: Version identifier. Required. 
:type version: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -242,99 +266,111 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, version=version, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - 
registry_name: str, - component_name: str, - version: str, - **kwargs: Any - ) -> "_models.ComponentVersion": + self, resource_group_name: str, registry_name: str, component_name: str, version: str, **kwargs: Any + ) -> _models.ComponentVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param component_name: Container name. + :param component_name: Container name. Required. :type component_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComponentVersion, or the result of cls(response) + :return: ComponentVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -342,15 +378,16 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ComponentVersion', pipeline_response) + deserialized = self._deserialize("ComponentVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore - + get.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}" + } async def _create_or_update_initial( self, @@ -358,62 +395,175 @@ async def _create_or_update_initial( registry_name: str, component_name: str, version: str, - body: "_models.ComponentVersion", + body: Union[_models.ComponentVersion, IO], **kwargs: Any - ) -> "_models.ComponentVersion": - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentVersion"] + ) -> _models.ComponentVersion: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ComponentVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComponentVersion] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentVersion") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ComponentVersion', pipeline_response) + deserialized = self._deserialize("ComponentVersion", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', 
response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('ComponentVersion', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ComponentVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}" + } + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + version: str, + body: _models.ComponentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComponentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComponentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ComponentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ComponentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -422,23 +572,29 @@ async def begin_create_or_update( registry_name: str, component_name: str, version: str, - body: "_models.ComponentVersion", + body: Union[_models.ComponentVersion, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.ComponentVersion"]: + ) -> AsyncLROPoller[_models.ComponentVersion]: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param component_name: Container name. + :param component_name: Container name. Required. :type component_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :param body: Version entity to create or update. Is either a ComponentVersion type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -451,17 +607,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ComponentVersion] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentVersion"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComponentVersion] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -471,29 +627,37 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ComponentVersion', pipeline_response) + deserialized = self._deserialize("ComponentVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}" 
+ } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_data_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_data_containers_operations.py index f3d04e67a3c4..19360e12136b 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_data_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_data_containers_operations.py @@ -6,46 +6,60 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._registry_data_containers_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._registry_data_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class RegistryDataContainersOperations: - """RegistryDataContainersOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class RegistryDataContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_data_containers` attribute. 
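The regenerated pager no longer replays the original build_list_request for follow-up pages; it re-parses the service-provided nextLink and forces the client's api-version back into the query, as the prepare_request() branch in these hunks shows. A self-contained sketch of that parameter rebuild, using only the query handling (the generated code additionally rebuilds the URL with urllib.parse.urljoin); the function name and example link are local to the sketch:

import urllib.parse

from azure.core.utils import case_insensitive_dict


def rebuild_next_request_params(next_link: str, client_api_version: str) -> dict:
    # Split the nextLink query into its parameters, re-quoting each value,
    # then overwrite api-version so every page uses the client's API version.
    parsed = urllib.parse.urlparse(next_link)
    params = case_insensitive_dict(
        {
            key: [urllib.parse.quote(v) for v in value]
            for key, value in urllib.parse.parse_qs(parsed.query).items()
        }
    )
    params["api-version"] = client_api_version
    return params


params = rebuild_next_request_params(
    "https://management.azure.com/subscriptions/000/resourceGroups/rg/providers/"
    "Microsoft.MachineLearningServices/registries/reg/data"
    "?api-version=2023-04-01-preview&$skipToken=page2",
    "2023-08-01-preview",
)
print(params["api-version"])  # the client's api-version replaces the one in the link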
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -53,60 +67,73 @@ def list( resource_group_name: str, registry_name: str, skip: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, **kwargs: Any - ) -> AsyncIterable["_models.DataContainerResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.DataContainer"]: """List Data containers. List Data containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataContainerResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either DataContainer or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.DataContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.DataContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, 
list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - api_version=api_version, - skip=skip, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -117,16 +144,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("DataContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -137,80 +163,84 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - name: str, - **kwargs: Any + self, resource_group_name: str, registry_name: str, name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, 
registry_name=registry_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - name: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, registry_name: str, name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -222,94 +252,107 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, name=name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - registry_name: str, - name: str, - **kwargs: Any - ) -> "_models.DataContainer": + self, resource_group_name: str, registry_name: str, name: str, **kwargs: Any + ) -> _models.DataContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. 
This is case-insensitive. + Required. :type registry_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataContainer, or the result of cls(response) + :return: DataContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -317,76 +360,184 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('DataContainer', pipeline_response) + deserialized = self._deserialize("DataContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}" + } async def _create_or_update_initial( self, resource_group_name: str, registry_name: str, name: str, - body: "_models.DataContainer", + body: Union[_models.DataContainer, IO], **kwargs: Any - ) -> "_models.DataContainer": - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataContainer"] + ) -> _models.DataContainer: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", 
{}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'DataContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataContainer] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DataContainer") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('DataContainer', pipeline_response) + deserialized = self._deserialize("DataContainer", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('DataContainer', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("DataContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}"} # type: ignore + _create_or_update_initial.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}" + } + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + name: str, + body: _models.DataContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DataContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DataContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.DataContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DataContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either DataContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.DataContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -394,21 +545,27 @@ async def begin_create_or_update( resource_group_name: str, registry_name: str, name: str, - body: "_models.DataContainer", + body: Union[_models.DataContainer, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.DataContainer"]: + ) -> AsyncLROPoller[_models.DataContainer]: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.DataContainer + :param body: Container entity to create or update. Is either a DataContainer type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -421,17 +578,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.DataContainer] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataContainer"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataContainer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -440,29 +597,37 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def 
get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('DataContainer', pipeline_response) + deserialized = self._deserialize("DataContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_data_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_data_versions_operations.py index 8e594e18e8dc..bd9499e08e6c 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_data_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_data_versions_operations.py @@ -6,46 +6,61 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._registry_data_versions_operations import build_create_or_get_start_pending_upload_request, build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._registry_data_versions_operations import ( + build_create_or_get_start_pending_upload_request, + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class RegistryDataVersionsOperations: - """RegistryDataVersionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class RegistryDataVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_data_versions` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -57,80 +72,90 @@ def list( top: Optional[int] = None, skip: Optional[str] = None, tags: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, **kwargs: Any - ) -> AsyncIterable["_models.DataVersionBaseResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.DataVersionBase"]: """List data versions in the data container. List data versions in the data container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Data container's name. + :param name: Data container's name. Required. :type name: str - :param order_by: Please choose OrderBy value from ['createdtime', 'modifiedtime']. + :param order_by: Please choose OrderBy value from ['createdtime', 'modifiedtime']. Default + value is None. :type order_by: str :param top: Top count of results, top count cannot be greater than the page size. If topCount > page size, results with be default page size count - will be returned. + will be returned. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, - ListViewType.All]View type for including/excluding (for example) archived entities. + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataVersionBaseResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either DataVersionBase or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.DataVersionBaseResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.DataVersionBase] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataVersionBaseResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataVersionBaseResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, tags=tags, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - name=name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - tags=tags, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -141,16 +166,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("DataVersionBaseResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: 
disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -161,85 +185,87 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - name: str, - version: str, - **kwargs: Any + self, resource_group_name: str, registry_name: str, name: str, version: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = 
self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - name: str, - version: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, registry_name: str, name: str, version: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -251,99 +277,111 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
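# A short sketch of the delete LRO on a registry data version, assuming a `client`
# built as in the earlier DataContainer sketch; the `registry_data_versions` attribute
# name is taken from the class docstring above, and the parameters from begin_delete.
async def delete_data_version(client) -> None:
    poller = await client.registry_data_versions.begin_delete(
        resource_group_name="<resource-group>",
        registry_name="<registry>",
        name="<data-asset-name>",
        version="1",
    )
    await poller.result()  # the poller resolves to None once the service finishes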
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, name=name, version=version, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - registry_name: str, - name: str, - version: str, - **kwargs: Any - ) -> "_models.DataVersionBase": + self, resource_group_name: str, registry_name: str, name: str, version: str, **kwargs: Any + ) -> _models.DataVersionBase: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataVersionBase, or the result of cls(response) + :return: DataVersionBase or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataVersionBase"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataVersionBase] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -351,15 +389,16 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('DataVersionBase', pipeline_response) + deserialized = self._deserialize("DataVersionBase", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}" + } async def _create_or_update_initial( self, @@ -367,62 +406,175 @@ async def _create_or_update_initial( registry_name: str, name: str, version: str, - body: "_models.DataVersionBase", + body: Union[_models.DataVersionBase, IO], **kwargs: Any - ) -> "_models.DataVersionBase": - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataVersionBase"] + ) -> 
_models.DataVersionBase: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'DataVersionBase') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataVersionBase] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DataVersionBase") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('DataVersionBase', pipeline_response) + deserialized = self._deserialize("DataVersionBase", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('DataVersionBase', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("DataVersionBase", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) 
# type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}" + } - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}"} # type: ignore + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + name: str, + version: str, + body: _models.DataVersionBase, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DataVersionBase]: + """Create or update version. + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DataVersionBase or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.DataVersionBase] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DataVersionBase]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DataVersionBase or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.DataVersionBase] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -431,23 +583,29 @@ async def begin_create_or_update( registry_name: str, name: str, version: str, - body: "_models.DataVersionBase", + body: Union[_models.DataVersionBase, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.DataVersionBase"]: + ) -> AsyncLROPoller[_models.DataVersionBase]: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :param body: Version entity to create or update. Is either a DataVersionBase type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -460,17 +618,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.DataVersionBase] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataVersionBase"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataVersionBase] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -480,92 +638,195 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('DataVersionBase', pipeline_response) + deserialized = self._deserialize("DataVersionBase", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}" + } - @distributed_trace_async + 
@overload async def create_or_get_start_pending_upload( self, resource_group_name: str, registry_name: str, name: str, version: str, - body: "_models.PendingUploadRequestDto", + body: _models.PendingUploadRequestDto, + *, + content_type: str = "application/json", **kwargs: Any - ) -> "_models.PendingUploadResponseDto": + ) -> _models.PendingUploadResponseDto: """Generate a storage location and credential for the client to upload a data asset to. Generate a storage location and credential for the client to upload a data asset to. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Data asset name. This is case-sensitive. + :param name: Data asset name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Pending upload request object. + :param body: Pending upload request object. Required. :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_get_start_pending_upload( + self, + resource_group_name: str, + registry_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a data asset to. + + Generate a storage location and credential for the client to upload a data asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param name: Data asset name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_get_start_pending_upload( + self, + resource_group_name: str, + registry_name: str, + name: str, + version: str, + body: Union[_models.PendingUploadRequestDto, IO], + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a data asset to. 
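# Sketch of requesting an upload location for a registry data asset version, reusing the
# assumed `client` from the first sketch and the generated `models` namespace; the import
# path is assumed, and the request DTO is left at its defaults because only the operation
# signature appears in this diff.
from azure.ai.ml._restclient.v2023_08_01_preview import models as ml_models  # path assumed

async def start_pending_upload(client) -> None:
    response = await client.registry_data_versions.create_or_get_start_pending_upload(
        resource_group_name="<resource-group>",
        registry_name="<registry>",
        name="<data-asset-name>",
        version="1",
        body=ml_models.PendingUploadRequestDto(),
    )
    print(response)  # a PendingUploadResponseDto describing where to upload the data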
+ + Generate a storage location and credential for the client to upload a data asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param name: Data asset name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Is either a PendingUploadRequestDto type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PendingUploadResponseDto, or the result of cls(response) + :return: PendingUploadResponseDto or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PendingUploadResponseDto"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PendingUploadRequestDto') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PendingUploadResponseDto] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PendingUploadRequestDto") request = build_create_or_get_start_pending_upload_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_get_start_pending_upload.metadata['url'], + content=_content, + template_url=self.create_or_get_start_pending_upload.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -573,12 
+834,13 @@ async def create_or_get_start_pending_upload( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PendingUploadResponseDto', pipeline_response) + deserialized = self._deserialize("PendingUploadResponseDto", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_get_start_pending_upload.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}/startPendingUpload"} # type: ignore - + create_or_get_start_pending_upload.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}/startPendingUpload" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_environment_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_environment_containers_operations.py index 850199c61975..fb72152812ac 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_environment_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_environment_containers_operations.py @@ -6,46 +6,60 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models from ..._vendor import _convert_request -from ...operations._registry_environment_containers_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._registry_environment_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class RegistryEnvironmentContainersOperations: - """RegistryEnvironmentContainersOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class RegistryEnvironmentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_environment_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -53,60 +67,74 @@ def list( resource_group_name: str, registry_name: str, skip: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, **kwargs: Any - ) -> AsyncIterable["_models.EnvironmentContainerResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.EnvironmentContainer"]: """List environment containers. List environment containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either EnvironmentContainerResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either EnvironmentContainer or the result of + cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - api_version=api_version, - skip=skip, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -117,16 +145,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("EnvironmentContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + 
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -137,80 +164,84 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - environment_name: str, - **kwargs: Any + self, resource_group_name: str, registry_name: str, environment_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + 
response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - environment_name: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, registry_name: str, environment_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param environment_name: Container name. + :param environment_name: Container name. Required. :type environment_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -222,94 +253,107 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, 
lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - registry_name: str, - environment_name: str, - **kwargs: Any - ) -> "_models.EnvironmentContainer": + self, resource_group_name: str, registry_name: str, environment_name: str, **kwargs: Any + ) -> _models.EnvironmentContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param environment_name: Container name. This is case-sensitive. + :param environment_name: Container name. This is case-sensitive. Required. 
:type environment_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EnvironmentContainer, or the result of cls(response) + :return: EnvironmentContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -317,76 +361,184 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}" + } async def _create_or_update_initial( self, resource_group_name: str, registry_name: str, environment_name: str, - body: "_models.EnvironmentContainer", + body: Union[_models.EnvironmentContainer, IO], **kwargs: Any - ) -> "_models.EnvironmentContainer": - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentContainer"] + ) -> _models.EnvironmentContainer: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + 
error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'EnvironmentContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EnvironmentContainer] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentContainer") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}" + } + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + body: _models.EnvironmentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.EnvironmentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either EnvironmentContainer or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.EnvironmentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either EnvironmentContainer or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -394,21 +546,27 @@ async def begin_create_or_update( resource_group_name: str, registry_name: str, environment_name: str, - body: "_models.EnvironmentContainer", + body: Union[_models.EnvironmentContainer, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.EnvironmentContainer"]: + ) -> AsyncLROPoller[_models.EnvironmentContainer]: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param environment_name: Container name. + :param environment_name: Container name. Required. :type environment_name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :param body: Container entity to create or update. Is either a EnvironmentContainer type or a + IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -421,17 +579,17 @@ async def begin_create_or_update( of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentContainer"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EnvironmentContainer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -440,29 +598,37 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}" + } 
diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_environment_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_environment_versions_operations.py index a7a29c7ad4f4..aae5b814cbb6 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_environment_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_environment_versions_operations.py @@ -6,46 +6,60 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._registry_environment_versions_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._registry_environment_versions_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class RegistryEnvironmentVersionsOperations: - """RegistryEnvironmentVersionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class RegistryEnvironmentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_environment_versions` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -56,78 +70,87 @@ def list( order_by: Optional[str] = None, top: Optional[int] = None, skip: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, stage: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.EnvironmentVersionResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.EnvironmentVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param environment_name: Container name. This is case-sensitive. + :param environment_name: Container name. This is case-sensitive. Required. :type environment_name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :param stage: Stage for including/excluding (for example) archived entities. Takes priority - over listViewType. + over listViewType. Default value is None. 
:type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either EnvironmentVersionResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either EnvironmentVersion or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, list_view_type=list_view_type, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - environment_name=environment_name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - list_view_type=list_view_type, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -138,16 +161,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("EnvironmentVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: 
disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -158,85 +180,87 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - environment_name: str, - version: str, - **kwargs: Any + self, resource_group_name: str, registry_name: str, environment_name: str, version: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', 
response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - environment_name: str, - version: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, registry_name: str, environment_name: str, version: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param environment_name: Container name. + :param environment_name: Container name. Required. :type environment_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -248,99 +272,111 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, version=version, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - registry_name: str, - environment_name: str, - version: str, - **kwargs: Any - ) -> "_models.EnvironmentVersion": + self, resource_group_name: str, registry_name: str, environment_name: str, version: str, **kwargs: Any + ) -> _models.EnvironmentVersion: """Get version. Get version. 
:param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param environment_name: Container name. This is case-sensitive. + :param environment_name: Container name. This is case-sensitive. Required. :type environment_name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EnvironmentVersion, or the result of cls(response) + :return: EnvironmentVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -348,15 +384,16 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}" + } async def _create_or_update_initial( self, @@ -364,62 
+401,175 @@ async def _create_or_update_initial( registry_name: str, environment_name: str, version: str, - body: "_models.EnvironmentVersion", + body: Union[_models.EnvironmentVersion, IO], **kwargs: Any - ) -> "_models.EnvironmentVersion": - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentVersion"] + ) -> _models.EnvironmentVersion: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'EnvironmentVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EnvironmentVersion] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentVersion") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", 
response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + body: _models.EnvironmentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.EnvironmentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either EnvironmentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.EnvironmentVersion]: + """Create or update version. + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. 
+ :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either EnvironmentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -428,23 +578,29 @@ async def begin_create_or_update( registry_name: str, environment_name: str, version: str, - body: "_models.EnvironmentVersion", + body: Union[_models.EnvironmentVersion, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.EnvironmentVersion"]: + ) -> AsyncLROPoller[_models.EnvironmentVersion]: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param environment_name: Container name. + :param environment_name: Container name. Required. :type environment_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :param body: Version entity to create or update. Is either a EnvironmentVersion type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -457,17 +613,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentVersion"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EnvironmentVersion] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -477,29 +633,37 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore + begin_create_or_update.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_model_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_model_containers_operations.py index ceadc7c24b8a..695ece92624d 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_model_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_model_containers_operations.py @@ -6,46 +6,60 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._registry_model_containers_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._registry_model_containers_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class RegistryModelContainersOperations: - """RegistryModelContainersOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class RegistryModelContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
+ Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_model_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -53,60 +67,73 @@ def list( resource_group_name: str, registry_name: str, skip: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, **kwargs: Any - ) -> AsyncIterable["_models.ModelContainerResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.ModelContainer"]: """List model containers. List model containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ModelContainerResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either ModelContainer or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ModelContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - api_version=api_version, - skip=skip, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -117,16 +144,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ModelContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await 
self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -137,80 +163,84 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - model_name: str, - **kwargs: Any + self, resource_group_name: str, registry_name: str, model_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + 
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - model_name: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, registry_name: str, model_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. + :param model_name: Container name. Required. :type model_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -222,94 +252,107 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is 
True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - registry_name: str, - model_name: str, - **kwargs: Any - ) -> "_models.ModelContainer": + self, resource_group_name: str, registry_name: str, model_name: str, **kwargs: Any + ) -> _models.ModelContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. This is case-sensitive. + :param model_name: Container name. This is case-sensitive. Required. 
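begin_delete above now returns AsyncLROPoller[None] and builds its polling method through an explicit cast. A hedged sketch of driving that poller to completion follows, reusing a client constructed as in the earlier sketch; the resource names are placeholders.

from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices


async def delete_model_container(client: AzureMachineLearningServices) -> None:
    # Sketch only; assumes `client` was built as in the create_environment_version example.
    poller = await client.registry_model_containers.begin_delete(
        resource_group_name="<resource-group>",
        registry_name="<registry>",
        model_name="<model>",
    )
    await poller.result()  # completes once the location-based LRO reports success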
:type model_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ModelContainer, or the result of cls(response) + :return: ModelContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -317,76 +360,184 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ModelContainer', pipeline_response) + deserialized = self._deserialize("ModelContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}" + } async def _create_or_update_initial( self, resource_group_name: str, registry_name: str, model_name: str, - body: "_models.ModelContainer", + body: Union[_models.ModelContainer, IO], **kwargs: Any - ) -> "_models.ModelContainer": - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelContainer"] + ) -> _models.ModelContainer: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str 
- content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ModelContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ModelContainer] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelContainer") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ModelContainer', pipeline_response) + deserialized = self._deserialize("ModelContainer", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('ModelContainer', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ModelContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore + _create_or_update_initial.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}" + } + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + body: _models.ModelContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ModelContainer]: + """Create or update model container. + + Create or update model container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ModelContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ModelContainer]: + """Create or update model container. + + Create or update model container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either ModelContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -394,21 +545,27 @@ async def begin_create_or_update( resource_group_name: str, registry_name: str, model_name: str, - body: "_models.ModelContainer", + body: Union[_models.ModelContainer, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.ModelContainer"]: + ) -> AsyncLROPoller[_models.ModelContainer]: """Create or update model container. Create or update model container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. + :param model_name: Container name. Required. :type model_name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer + :param body: Container entity to create or update. Is either a ModelContainer type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -421,17 +578,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ModelContainer] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelContainer"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ModelContainer] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -440,29 +597,37 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ModelContainer', pipeline_response) + deserialized = self._deserialize("ModelContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}" + } diff --git 
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_model_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_model_versions_operations.py index 0b28a2fe9613..c2bd4286e9ca 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_model_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_registry_model_versions_operations.py @@ -6,46 +6,62 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._registry_model_versions_operations import build_create_or_get_start_pending_upload_request, build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request, build_package_request_initial -T = TypeVar('T') +from ...operations._registry_model_versions_operations import ( + build_create_or_get_start_pending_upload_request, + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, + build_package_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class RegistryModelVersionsOperations: - """RegistryModelVersionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class RegistryModelVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`registry_model_versions` attribute. 
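Per the regenerated class docstrings, these operation groups are no longer instantiated directly; they are reached as attributes of the aio service client. Below is a short sketch of that access pattern and of iterating the paged list result, with placeholder names; the client is assumed to be constructed as in the first sketch.

from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices


async def print_model_containers(client: AzureMachineLearningServices) -> None:
    # Sketch only; list() returns an AsyncItemPaged that now re-applies the
    # client api-version when following next links.
    pager = client.registry_model_containers.list(
        resource_group_name="<resource-group>",
        registry_name="<registry>",
        list_view_type="ActiveOnly",  # known values per the regenerated docstring: ActiveOnly, ArchivedOnly, All
    )
    async for container in pager:
        print(container.name)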
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -60,60 +76,68 @@ def list( description: Optional[str] = None, tags: Optional[str] = None, properties: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, **kwargs: Any - ) -> AsyncIterable["_models.ModelVersionResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.ModelVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. This is case-sensitive. + :param model_name: Container name. This is case-sensitive. Required. :type model_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param version: Version identifier. + :param version: Version identifier. Default value is None. :type version: str - :param description: Model description. + :param description: Model description. Default value is None. :type description: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param properties: Comma-separated list of property names (and optionally values). Example: - prop1,prop2=value2. + prop1,prop2=value2. Default value is None. :type properties: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ModelVersionResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either ModelVersion or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, order_by=order_by, top=top, @@ -122,28 +146,26 @@ def prepare_request(next_link=None): tags=tags, properties=properties, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - model_name=model_name, - api_version=api_version, - skip=skip, - order_by=order_by, - top=top, - version=version, - description=description, - tags=tags, - properties=properties, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -154,16 +176,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ModelVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, 
AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -174,85 +195,87 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - model_name: str, - version: str, - **kwargs: Any + self, resource_group_name: str, registry_name: str, model_name: str, version: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', 
response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - registry_name: str, - model_name: str, - version: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, registry_name: str, model_name: str, version: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. + :param model_name: Container name. Required. :type model_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -264,99 +287,111 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, version=version, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - registry_name: str, - model_name: str, - version: str, - **kwargs: Any - ) -> "_models.ModelVersion": + self, resource_group_name: str, registry_name: str, model_name: str, version: str, **kwargs: Any + ) -> _models.ModelVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. This is case-sensitive. + :param model_name: Container name. This is case-sensitive. Required. :type model_name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ModelVersion, or the result of cls(response) + :return: ModelVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -364,15 +399,16 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ModelVersion', pipeline_response) + deserialized = self._deserialize("ModelVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}" + } async def _create_or_update_initial( self, @@ -380,62 +416,175 @@ async def _create_or_update_initial( registry_name: str, model_name: str, version: str, - body: "_models.ModelVersion", + body: Union[_models.ModelVersion, IO], **kwargs: Any - ) -> 
"_models.ModelVersion": - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelVersion"] + ) -> _models.ModelVersion: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ModelVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ModelVersion] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelVersion") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ModelVersion', pipeline_response) + deserialized = self._deserialize("ModelVersion", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('ModelVersion', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ModelVersion", pipeline_response) if cls: - return cls(pipeline_response, 
deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}" + } + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: _models.ModelVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ModelVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ModelVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ModelVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ModelVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -444,23 +593,29 @@ async def begin_create_or_update( registry_name: str, model_name: str, version: str, - body: "_models.ModelVersion", + body: Union[_models.ModelVersion, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.ModelVersion"]: + ) -> AsyncLROPoller[_models.ModelVersion]: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. + :param model_name: Container name. Required. :type model_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion + :param body: Version entity to create or update. Is either a ModelVersion type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -473,17 +628,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ModelVersion] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelVersion"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ModelVersion] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -493,32 +648,40 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ModelVersion', pipeline_response) + deserialized = self._deserialize("ModelVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}" + } async def _package_initial( 
self, @@ -526,62 +689,170 @@ async def _package_initial( registry_name: str, model_name: str, version: str, - body: "_models.PackageRequest", + body: Union[_models.PackageRequest, IO], **kwargs: Any - ) -> Optional["_models.PackageResponse"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.PackageResponse"]] + ) -> Optional[_models.PackageResponse]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PackageRequest') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.PackageResponse]] = kwargs.pop("cls", None) - request = build_package_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PackageRequest") + + request = build_package_request( resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._package_initial.metadata['url'], + content=_content, + template_url=self._package_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('PackageResponse', pipeline_response) + deserialized = self._deserialize("PackageResponse", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) 
return deserialized - _package_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package"} # type: ignore + _package_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package" + } + + @overload + async def begin_package( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: _models.PackageRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_package( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.PackageResponse]: + """Model Version Package operation. + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_package( @@ -590,23 +861,29 @@ async def begin_package( registry_name: str, model_name: str, version: str, - body: "_models.PackageRequest", + body: Union[_models.PackageRequest, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.PackageResponse"]: + ) -> AsyncLROPoller[_models.PackageResponse]: """Model Version Package operation. Model Version Package operation. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. This is case-sensitive. + :param model_name: Container name. This is case-sensitive. Required. :type model_name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Package operation request body. - :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest + :param body: Package operation request body. Is either a PackageRequest type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -619,17 +896,17 @@ async def begin_package( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.PackageResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PackageResponse] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._package_initial( resource_group_name=resource_group_name, @@ -639,92 +916,194 @@ async def begin_package( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('PackageResponse', pipeline_response) + deserialized = self._deserialize("PackageResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_package.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package"} # type: ignore + begin_package.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package" + } - @distributed_trace_async + @overload async def 
create_or_get_start_pending_upload( self, resource_group_name: str, registry_name: str, model_name: str, version: str, - body: "_models.PendingUploadRequestDto", + body: _models.PendingUploadRequestDto, + *, + content_type: str = "application/json", **kwargs: Any - ) -> "_models.PendingUploadResponseDto": + ) -> _models.PendingUploadResponseDto: """Generate a storage location and credential for the client to upload a model asset to. Generate a storage location and credential for the client to upload a model asset to. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Model name. This is case-sensitive. + :param model_name: Model name. This is case-sensitive. Required. :type model_name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Pending upload request object. + :param body: Pending upload request object. Required. :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PendingUploadResponseDto, or the result of cls(response) + :return: PendingUploadResponseDto or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create_or_get_start_pending_upload( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a model asset to. + + Generate a storage location and credential for the client to upload a model asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Model name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def create_or_get_start_pending_upload( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: Union[_models.PendingUploadRequestDto, IO], + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a model asset to. + + Generate a storage location and credential for the client to upload a model asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Model name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Is either a PendingUploadRequestDto type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PendingUploadResponseDto"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PendingUploadResponseDto] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'PendingUploadRequestDto') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PendingUploadRequestDto") request = build_create_or_get_start_pending_upload_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_get_start_pending_upload.metadata['url'], + 
content=_content, + template_url=self.create_or_get_start_pending_upload.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -732,12 +1111,13 @@ async def create_or_get_start_pending_upload( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PendingUploadResponseDto', pipeline_response) + deserialized = self._deserialize("PendingUploadResponseDto", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_get_start_pending_upload.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/startPendingUpload"} # type: ignore - + create_or_get_start_pending_upload.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/startPendingUpload" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_schedules_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_schedules_operations.py index 0c5772599e20..f79062709c56 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_schedules_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_schedules_operations.py @@ -6,46 +6,60 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models from ..._vendor import _convert_request -from ...operations._schedules_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_request -T = TypeVar('T') +from ...operations._schedules_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class SchedulesOperations: - """SchedulesOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class SchedulesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`schedules` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -53,60 +67,72 @@ def list( resource_group_name: str, workspace_name: str, skip: Optional[str] = None, - list_view_type: Optional[Union[str, "_models.ScheduleListViewType"]] = None, + list_view_type: Optional[Union[str, _models.ScheduleListViewType]] = None, **kwargs: Any - ) -> AsyncIterable["_models.ScheduleResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.Schedule"]: """List schedules in specified workspace. List schedules in specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: Status filter for schedule. + :param list_view_type: Status filter for schedule. Known values are: "EnabledOnly", + "DisabledOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ScheduleResourceArmPaginatedResult or the result - of cls(response) + :return: An iterator like instance of either Schedule or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ScheduleResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Schedule] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ScheduleResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ScheduleResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -117,16 +143,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ScheduleResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # 
pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -137,80 +162,83 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = 
self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete schedule. Delete schedule. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Schedule name. + :param name: Schedule name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -222,94 +250,104 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: 
AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}" + } @distributed_trace_async - async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any - ) -> "_models.Schedule": + async def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.Schedule: """Get schedule. Get schedule. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Schedule name. + :param name: Schedule name. Required. 
:type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Schedule, or the result of cls(response) + :return: Schedule or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Schedule - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Schedule"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Schedule] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -317,98 +355,121 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Schedule', pipeline_response) + deserialized = self._deserialize("Schedule", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}" + } async def _create_or_update_initial( - self, - resource_group_name: str, - workspace_name: str, - name: str, - body: "_models.Schedule", - **kwargs: Any - ) -> "_models.Schedule": - cls = kwargs.pop('cls', None) # type: ClsType["_models.Schedule"] + self, resource_group_name: str, workspace_name: str, name: str, body: Union[_models.Schedule, IO], **kwargs: Any + ) -> _models.Schedule: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - 
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'Schedule') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Schedule] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Schedule") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('Schedule', pipeline_response) + deserialized = self._deserialize("Schedule", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('Schedule', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) - if cls: - return cls(pipeline_response, deserialized, response_headers) + deserialized = self._deserialize("Schedule", pipeline_response) - return deserialized + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + return deserialized # type: ignore + _create_or_update_initial.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}" + } - @distributed_trace_async + @overload async def begin_create_or_update( self, resource_group_name: str, workspace_name: str, name: str, - body: "_models.Schedule", + body: _models.Schedule, + *, + content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller["_models.Schedule"]: + ) -> AsyncLROPoller[_models.Schedule]: """Create or update schedule. Create or update schedule. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Schedule name. + :param name: Schedule name. Required. :type name: str - :param body: Schedule definition. + :param body: Schedule definition. Required. :type body: ~azure.mgmt.machinelearningservices.models.Schedule + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -420,17 +481,92 @@ async def begin_create_or_update( :return: An instance of AsyncLROPoller that returns either Schedule or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Schedule] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Schedule"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Schedule]: + """Create or update schedule. + + Create or update schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Schedule name. Required. + :type name: str + :param body: Schedule definition. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+        :return: An instance of AsyncLROPoller that returns either Schedule or the result of
+         cls(response)
+        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Schedule]
+        :raises ~azure.core.exceptions.HttpResponseError:
+        """
+
+    @distributed_trace_async
+    async def begin_create_or_update(
+        self, resource_group_name: str, workspace_name: str, name: str, body: Union[_models.Schedule, IO], **kwargs: Any
+    ) -> AsyncLROPoller[_models.Schedule]:
+        """Create or update schedule.
+
+        Create or update schedule.
+
+        :param resource_group_name: The name of the resource group. The name is case insensitive.
+         Required.
+        :type resource_group_name: str
+        :param workspace_name: Name of Azure Machine Learning workspace. Required.
+        :type workspace_name: str
+        :param name: Schedule name. Required.
+        :type name: str
+        :param body: Schedule definition. Is either a Schedule type or a IO type. Required.
+        :type body: ~azure.mgmt.machinelearningservices.models.Schedule or IO
+        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
+         Default value is None.
+        :paramtype content_type: str
+        :keyword callable cls: A custom type or function that will be passed the direct response
+        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
+        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
+         this operation to not poll, or pass in your own initialized polling object for a personal
+         polling strategy.
+        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
+        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
+         Retry-After header is present.
+ :return: An instance of AsyncLROPoller that returns either Schedule or the result of + cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Schedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Schedule] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -439,29 +575,37 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('Schedule', pipeline_response) + deserialized = self._deserialize("Schedule", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_serverless_endpoints_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_serverless_endpoints_operations.py index 37c291e72fe0..acd58aac651c 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_serverless_endpoints_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_serverless_endpoints_operations.py @@ -6,102 +6,127 @@ # Code generated by Microsoft (R) AutoRest Code 
Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._serverless_endpoints_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_request, build_list_keys_request, build_list_request, build_regenerate_keys_request_initial, build_update_request_initial -T = TypeVar('T') +from ...operations._serverless_endpoints_operations import ( + build_create_or_update_request, + build_delete_request, + build_get_request, + build_list_keys_request, + build_list_request, + build_regenerate_keys_request, + build_update_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class ServerlessEndpointsOperations: - """ServerlessEndpointsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class ServerlessEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`serverless_endpoints` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name: str, - workspace_name: str, - skip: Optional[str] = None, - **kwargs: Any - ) -> AsyncIterable["_models.ServerlessEndpointTrackedResourceArmPaginatedResult"]: + self, resource_group_name: str, workspace_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.ServerlessEndpoint"]: """List Serverless Endpoints. List Serverless Endpoints. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either - ServerlessEndpointTrackedResourceArmPaginatedResult or the result of cls(response) + :return: An iterator like instance of either ServerlessEndpoint or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ServerlessEndpointTrackedResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ServerlessEndpointTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ServerlessEndpointTrackedResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - 
subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -112,16 +137,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ServerlessEndpointTrackedResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -132,80 +156,83 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) 
request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Delete Serverless Endpoint (asynchronous). Delete Serverless Endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Serverless Endpoint name. + :param name: Serverless Endpoint name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -217,94 +244,106 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } @distributed_trace_async async def get( - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any - ) -> "_models.ServerlessEndpoint": + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.ServerlessEndpoint: """Get Serverless Endpoint. Get Serverless Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Serverless Endpoint name. + :param name: Serverless Endpoint name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ServerlessEndpoint, or the result of cls(response) + :return: ServerlessEndpoint or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ServerlessEndpoint - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ServerlessEndpoint"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -312,77 +351,181 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ServerlessEndpoint', pipeline_response) + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } async def _update_initial( self, resource_group_name: str, workspace_name: str, name: str, - body: "_models.PartialMinimalTrackedResourceWithSkuAndIdentity", + body: Union[_models.PartialMinimalTrackedResourceWithSkuAndIdentity, IO], **kwargs: Any - ) -> Optional["_models.ServerlessEndpoint"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ServerlessEndpoint"]] + ) -> 
Optional[_models.ServerlessEndpoint]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PartialMinimalTrackedResourceWithSkuAndIdentity') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ServerlessEndpoint]] = kwargs.pop("cls", None) - request = build_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithSkuAndIdentity") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._update_initial.metadata['url'], + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ServerlessEndpoint', pipeline_response) + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return 
deserialized - _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.PartialMinimalTrackedResourceWithSkuAndIdentity, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ServerlessEndpoint]: + """Update Serverless Endpoint (asynchronous). + + Update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSkuAndIdentity + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ServerlessEndpoint]: + """Update Serverless Endpoint (asynchronous). + + Update Serverless Endpoint (asynchronous). + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_update( @@ -390,22 +533,28 @@ async def begin_update( resource_group_name: str, workspace_name: str, name: str, - body: "_models.PartialMinimalTrackedResourceWithSkuAndIdentity", + body: Union[_models.PartialMinimalTrackedResourceWithSkuAndIdentity, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.ServerlessEndpoint"]: + ) -> AsyncLROPoller[_models.ServerlessEndpoint]: """Update Serverless Endpoint (asynchronous). Update Serverless Endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Serverless Endpoint name. + :param name: Serverless Endpoint name. Required. :type name: str - :param body: Serverless Endpoint entity to apply during operation. + :param body: Serverless Endpoint entity to apply during operation. Is either a + PartialMinimalTrackedResourceWithSkuAndIdentity type or a IO type. Required. :type body: - ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSkuAndIdentity + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSkuAndIdentity or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -418,17 +567,17 @@ async def begin_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ServerlessEndpoint"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._update_initial( resource_group_name=resource_group_name, @@ -437,93 +586,203 @@ async def begin_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ServerlessEndpoint', pipeline_response) + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } async def _create_or_update_initial( self, resource_group_name: str, workspace_name: str, name: str, - body: "_models.ServerlessEndpoint", + body: 
Union[_models.ServerlessEndpoint, IO], **kwargs: Any - ) -> "_models.ServerlessEndpoint": - cls = kwargs.pop('cls', None) # type: ClsType["_models.ServerlessEndpoint"] + ) -> _models.ServerlessEndpoint: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ServerlessEndpoint') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ServerlessEndpoint") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ServerlessEndpoint', pipeline_response) + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('ServerlessEndpoint', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = 
self._deserialize("ServerlessEndpoint", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ServerlessEndpoint, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ServerlessEndpoint]: + """Create or update Serverless Endpoint (asynchronous). + + Create or update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ServerlessEndpoint + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.ServerlessEndpoint]: + """Create or update Serverless Endpoint (asynchronous). - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}"} # type: ignore + Create or update Serverless Endpoint (asynchronous). + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_create_or_update( @@ -531,21 +790,26 @@ async def begin_create_or_update( resource_group_name: str, workspace_name: str, name: str, - body: "_models.ServerlessEndpoint", + body: Union[_models.ServerlessEndpoint, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.ServerlessEndpoint"]: + ) -> AsyncLROPoller[_models.ServerlessEndpoint]: """Create or update Serverless Endpoint (asynchronous). Create or update Serverless Endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Serverless Endpoint name. + :param name: Serverless Endpoint name. Required. :type name: str - :param body: Serverless Endpoint entity to apply during operation. - :type body: ~azure.mgmt.machinelearningservices.models.ServerlessEndpoint + :param body: Serverless Endpoint entity to apply during operation. Is either a + ServerlessEndpoint type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ServerlessEndpoint or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -558,17 +822,17 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ServerlessEndpoint"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -577,81 +841,93 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ServerlessEndpoint', pipeline_response) + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, + AsyncARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs), + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } 
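The hunk above completes the regenerated async `begin_create_or_update` for serverless endpoints: the initial call now accepts either a `ServerlessEndpoint` model or a raw `IO`/`bytes` payload, resolves `api_version` from the client configuration instead of a hard-coded string, and returns an `AsyncLROPoller` that polls with `final-state-via: original-uri`. The following sketch shows how a caller might exercise the new bytes branch; the import path, client construction, and payload shape are assumptions for illustration and are not part of this patch.

    # Illustrative sketch only: the module path, client constructor, and JSON payload
    # below are assumptions, not something this patch defines.
    import asyncio
    import json

    from azure.identity.aio import DefaultAzureCredential
    from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices


    async def create_endpoint() -> None:
        async with DefaultAzureCredential() as credential:
            async with AzureMachineLearningServices(credential, "<subscription-id>") as client:
                # `body` may be a ServerlessEndpoint model or raw IO/bytes; bytes of a JSON
                # document go through the new isinstance(body, (IOBase, bytes)) branch and
                # are sent as-is with content_type "application/json".
                payload = json.dumps({"location": "eastus", "properties": {}}).encode("utf-8")
                poller = await client.serverless_endpoints.begin_create_or_update(
                    resource_group_name="<resource-group>",
                    workspace_name="<workspace>",
                    name="<endpoint-name>",
                    body=payload,
                )
                endpoint = await poller.result()  # poller resolves to a ServerlessEndpoint
                print(endpoint.name)


    asyncio.run(create_endpoint())

Passing a model instance instead of bytes routes through `self._serialize.body(body, "ServerlessEndpoint")`, the other branch the regenerated initial call handles, so existing model-based callers keep working unchanged.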
@distributed_trace_async async def list_keys( - self, - resource_group_name: str, - workspace_name: str, - name: str, - **kwargs: Any - ) -> "_models.EndpointAuthKeys": + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.EndpointAuthKeys: """List EndpointAuthKeys for an Endpoint using Key-based authentication. List EndpointAuthKeys for an Endpoint using Key-based authentication. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Serverless Endpoint name. + :param name: Serverless Endpoint name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EndpointAuthKeys, or the result of cls(response) + :return: EndpointAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EndpointAuthKeys"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EndpointAuthKeys] = kwargs.pop("cls", None) - request = build_list_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_keys.metadata['url'], + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -659,76 +935,177 @@ async def list_keys( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EndpointAuthKeys', pipeline_response) + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/listKeys"} # type: ignore - + list_keys.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/listKeys" + } async def _regenerate_keys_initial( self, resource_group_name: str, workspace_name: str, name: str, - body: "_models.RegenerateEndpointKeysRequest", + body: Union[_models.RegenerateEndpointKeysRequest, IO], **kwargs: Any - ) -> Optional["_models.EndpointAuthKeys"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EndpointAuthKeys"]] + ) -> Optional[_models.EndpointAuthKeys]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'RegenerateEndpointKeysRequest') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.EndpointAuthKeys]] = kwargs.pop("cls", None) - request = build_regenerate_keys_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "RegenerateEndpointKeysRequest") + + request = build_regenerate_keys_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._regenerate_keys_initial.metadata['url'], + content=_content, + template_url=self._regenerate_keys_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('EndpointAuthKeys', pipeline_response) + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + 
response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _regenerate_keys_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys"} # type: ignore + _regenerate_keys_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys" + } + + @overload + async def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.RegenerateEndpointKeysRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.EndpointAuthKeys]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: RegenerateKeys request . Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either EndpointAuthKeys or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EndpointAuthKeys] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.EndpointAuthKeys]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: RegenerateKeys request . Required. 
+ :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either EndpointAuthKeys or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EndpointAuthKeys] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_regenerate_keys( @@ -736,21 +1113,26 @@ async def begin_regenerate_keys( resource_group_name: str, workspace_name: str, name: str, - body: "_models.RegenerateEndpointKeysRequest", + body: Union[_models.RegenerateEndpointKeysRequest, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.EndpointAuthKeys"]: + ) -> AsyncLROPoller[_models.EndpointAuthKeys]: """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Serverless Endpoint name. + :param name: Serverless Endpoint name. Required. :type name: str - :param body: RegenerateKeys request . - :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest + :param body: RegenerateKeys request . Is either a RegenerateEndpointKeysRequest type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -763,17 +1145,17 @@ async def begin_regenerate_keys( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.EndpointAuthKeys] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.EndpointAuthKeys"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EndpointAuthKeys] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._regenerate_keys_initial( resource_group_name=resource_group_name, @@ -782,29 +1164,36 @@ async def begin_regenerate_keys( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('EndpointAuthKeys', pipeline_response) + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_regenerate_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys"} # type: ignore + begin_regenerate_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys" + } diff --git 
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_usages_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_usages_operations.py index 7ba60ed7a96f..6ca521cf21b3 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_usages_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_usages_operations.py @@ -6,87 +6,105 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- +import urllib.parse from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request from ...operations._usages_operations import build_list_request -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class UsagesOperations: - """UsagesOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class UsagesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`usages` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list( - self, - location: str, - **kwargs: Any - ) -> AsyncIterable["_models.ListUsagesResult"]: + def list(self, location: str, **kwargs: Any) -> AsyncIterable["_models.Usage"]: """Gets the current usage information as well as limits for AML resources for given subscription and location. 
- :param location: The location for which resource usage is queried. + :param location: The location for which resource usage is queried. Required. :type location: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ListUsagesResult or the result of cls(response) + :return: An iterator like instance of either Usage or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ListUsagesResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Usage] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ListUsagesResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListUsagesResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, location=location, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - location=location, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -97,16 +115,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ListUsagesResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -117,8 +134,8 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return 
AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_virtual_machine_sizes_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_virtual_machine_sizes_operations.py index 9c3958cce441..f8e5ab8481d9 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_virtual_machine_sizes_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_virtual_machine_sizes_operations.py @@ -8,79 +8,89 @@ # -------------------------------------------------------------------------- from typing import Any, Callable, Dict, Optional, TypeVar -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request from ...operations._virtual_machine_sizes_operations import build_list_request -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class VirtualMachineSizesOperations: - """VirtualMachineSizesOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class VirtualMachineSizesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`virtual_machine_sizes` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace_async - async def list( - self, - location: str, - **kwargs: Any - ) -> "_models.VirtualMachineSizeListResult": + async def list(self, location: str, **kwargs: Any) -> _models.VirtualMachineSizeListResult: """Returns supported VM Sizes in a location. - :param location: The location upon which virtual-machine-sizes is queried. + :param location: The location upon which virtual-machine-sizes is queried. Required. :type location: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: VirtualMachineSizeListResult, or the result of cls(response) + :return: VirtualMachineSizeListResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.VirtualMachineSizeListResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineSizeListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.VirtualMachineSizeListResult] = kwargs.pop("cls", None) - request = build_list_request( location=location, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -88,12 +98,13 @@ async def list( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response) + deserialized = self._deserialize("VirtualMachineSizeListResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list.metadata = {'url': 
"/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes"} # type: ignore - + list.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_workspace_connections_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_workspace_connections_operations.py index 7e9866dee129..8d7fc535dab7 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_workspace_connections_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_workspace_connections_operations.py @@ -6,44 +6,63 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse +from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... import models as _models from ..._vendor import _convert_request -from ...operations._workspace_connections_operations import build_create_request, build_delete_request, build_get_request, build_list_request, build_list_secrets_request, build_update_request -T = TypeVar('T') +from ...operations._workspace_connections_operations import ( + build_create_request, + build_delete_request, + build_get_request, + build_list_request, + build_list_secrets_request, + build_test_connection_request, + build_update_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class WorkspaceConnectionsOperations: - """WorkspaceConnectionsOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class WorkspaceConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
+ Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`workspace_connections` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( @@ -53,58 +72,70 @@ def list( target: Optional[str] = None, category: Optional[str] = None, **kwargs: Any - ) -> AsyncIterable["_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult"]: + ) -> AsyncIterable["_models.WorkspaceConnectionPropertiesV2BasicResource"]: """Lists all the available machine learning workspaces connections under the specified workspace. Lists all the available machine learning workspaces connections under the specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param target: Target of the workspace connection. + :param target: Target of the workspace connection. Default value is None. :type target: str - :param category: Category of the workspace connection. + :param category: Category of the workspace connection. Default value is None. 
:type category: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either - WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult or the result of cls(response) + :return: An iterator like instance of either WorkspaceConnectionPropertiesV2BasicResource or + the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, target=target, category=category, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - target=target, - category=category, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -112,19 +143,20 @@ def prepare_request(next_link=None): return request async def extract_data(pipeline_response): - deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", pipeline_response) + deserialized = self._deserialize( + "WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", pipeline_response + ) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, 
AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -135,60 +167,64 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections" + } @distributed_trace_async async def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - connection_name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any ) -> None: """Delete machine learning workspaces connections by name. Delete machine learning workspaces connections by name. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. 
:type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -199,8 +235,9 @@ async def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" + } @distributed_trace_async async def get( @@ -208,48 +245,61 @@ async def get( resource_group_name: str, workspace_name: str, connection_name: str, + aoai_models_to_deploy: Optional[str] = None, **kwargs: Any - ) -> "_models.WorkspaceConnectionPropertiesV2BasicResource": + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: """Lists machine learning workspaces connections by name. Lists machine learning workspaces connections by name. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str + :param aoai_models_to_deploy: query parameter for which AOAI mode should be deployed. Default + value is None. 
+ :type aoai_models_to_deploy: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnectionPropertiesV2BasicResource, or the result of cls(response) + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnectionPropertiesV2BasicResource"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, + subscription_id=self._config.subscription_id, + aoai_models_to_deploy=aoai_models_to_deploy, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -257,15 +307,82 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnectionPropertiesV2BasicResource', pipeline_response) + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" + } + + @overload + async def update( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[_models.WorkspaceConnectionUpdateParameter] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """Update machine learning workspaces connections under the specified workspace. 
+ + Update machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: Parameters for workspace connection update. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUpdateParameter + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def update( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """Update machine learning workspaces connections under the specified workspace. + Update machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: Parameters for workspace connection update. Default value is None. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def update( @@ -273,58 +390,78 @@ async def update( resource_group_name: str, workspace_name: str, connection_name: str, - body: Optional["_models.WorkspaceConnectionUpdateParameter"] = None, + body: Optional[Union[_models.WorkspaceConnectionUpdateParameter, IO]] = None, **kwargs: Any - ) -> "_models.WorkspaceConnectionPropertiesV2BasicResource": + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: """Update machine learning workspaces connections under the specified workspace. Update machine learning workspaces connections under the specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. 
:type connection_name: str - :param body: Parameters for workspace connection update. - :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUpdateParameter + :param body: Parameters for workspace connection update. Is either a + WorkspaceConnectionUpdateParameter type or a IO type. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUpdateParameter or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnectionPropertiesV2BasicResource, or the result of cls(response) + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnectionPropertiesV2BasicResource"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - if body is not None: - _json = self._serialize.body(body, 'WorkspaceConnectionUpdateParameter') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = None + if body is not None: + _json = self._serialize.body(body, "WorkspaceConnectionUpdateParameter") + else: + _json = None request = build_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update.metadata['url'], + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -332,15 +469,85 @@ async def update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnectionPropertiesV2BasicResource', pipeline_response) + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" + } + + @overload + async def create( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[_models.WorkspaceConnectionPropertiesV2BasicResource] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """Create or update machine learning workspaces connections under the specified workspace. + + Create or update machine learning workspaces connections under the specified workspace. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: The object for creating or updating a new workspace connection. Default value is + None. + :type body: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def create( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """Create or update machine learning workspaces connections under the specified workspace. + + Create or update machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: The object for creating or updating a new workspace connection. Default value is + None. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def create( @@ -348,59 +555,79 @@ async def create( resource_group_name: str, workspace_name: str, connection_name: str, - body: Optional["_models.WorkspaceConnectionPropertiesV2BasicResource"] = None, + body: Optional[Union[_models.WorkspaceConnectionPropertiesV2BasicResource, IO]] = None, **kwargs: Any - ) -> "_models.WorkspaceConnectionPropertiesV2BasicResource": + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: """Create or update machine learning workspaces connections under the specified workspace. Create or update machine learning workspaces connections under the specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str - :param body: The object for creating or updating a new workspace connection. + :param body: The object for creating or updating a new workspace connection. Is either a + WorkspaceConnectionPropertiesV2BasicResource type or a IO type. Default value is None. :type body: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnectionPropertiesV2BasicResource, or the result of cls(response) + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnectionPropertiesV2BasicResource"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - if body is not None: - _json = self._serialize.body(body, 'WorkspaceConnectionPropertiesV2BasicResource') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = None + if body is not None: + _json = self._serialize.body(body, "WorkspaceConnectionPropertiesV2BasicResource") + else: + _json = None request = build_create_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create.metadata['url'], + content=_content, + template_url=self.create.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -408,15 +635,16 @@ async def create( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnectionPropertiesV2BasicResource', pipeline_response) + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore - + 
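# A minimal usage sketch for the connection `get`/`update`/`create` operations
# above. The import path, the client constructor arguments, and the
# `workspace_connections` attribute name are assumptions not established by
# this diff; adjust them to the actual client surface.
import asyncio

from azure.identity.aio import DefaultAzureCredential

from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices


async def show_connection() -> None:
    credential = DefaultAzureCredential()
    client = AzureMachineLearningServices(credential=credential, subscription_id="<subscription-id>")
    try:
        # `get` now also accepts the optional `aoai_models_to_deploy` query parameter.
        connection = await client.workspace_connections.get(
            resource_group_name="<resource-group>",
            workspace_name="<workspace>",
            connection_name="<connection>",
            aoai_models_to_deploy="<aoai-model>",
        )
        print(connection.name)
        # `update` and `create` take either the typed model
        # (WorkspaceConnectionUpdateParameter / WorkspaceConnectionPropertiesV2BasicResource)
        # or a raw IO/bytes payload, mirroring the overloads above.
    finally:
        await client.close()
        await credential.close()


asyncio.run(show_connection())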
create.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" + } @distributed_trace_async async def list_secrets( @@ -424,48 +652,61 @@ async def list_secrets( resource_group_name: str, workspace_name: str, connection_name: str, + aoai_models_to_deploy: Optional[str] = None, **kwargs: Any - ) -> "_models.WorkspaceConnectionPropertiesV2BasicResource": + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: """List all the secrets of a machine learning workspaces connections. List all the secrets of a machine learning workspaces connections. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str + :param aoai_models_to_deploy: query parameter for which AOAI mode should be deployed. Default + value is None. + :type aoai_models_to_deploy: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnectionPropertiesV2BasicResource, or the result of cls(response) + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnectionPropertiesV2BasicResource"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] = kwargs.pop("cls", None) - request = build_list_secrets_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, + subscription_id=self._config.subscription_id, + aoai_models_to_deploy=aoai_models_to_deploy, api_version=api_version, - template_url=self.list_secrets.metadata['url'], + template_url=self.list_secrets.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -473,12 +714,253 @@ async def 
list_secrets( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnectionPropertiesV2BasicResource', pipeline_response) + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_secrets.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/listsecrets"} # type: ignore + list_secrets.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/listsecrets" + } + + async def _test_connection_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[Union[_models.WorkspaceConnectionPropertiesV2BasicResource, IO]] = None, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + if body is not None: + _json = self._serialize.body(body, "WorkspaceConnectionPropertiesV2BasicResource") + else: + _json = None + + request = build_test_connection_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._test_connection_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + + _test_connection_initial.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/testconnection" + } + + @overload + async def begin_test_connection( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[_models.WorkspaceConnectionPropertiesV2BasicResource] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Test machine learning workspaces connections under the specified workspace. + + Test machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: Workspace Connection object. Default value is None. + :type body: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_test_connection( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Test machine learning workspaces connections under the specified workspace. + + Test machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: Workspace Connection object. Default value is None. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. 
+ :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_test_connection( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[Union[_models.WorkspaceConnectionPropertiesV2BasicResource, IO]] = None, + **kwargs: Any + ) -> AsyncLROPoller[None]: + """Test machine learning workspaces connections under the specified workspace. + + Test machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: Workspace Connection object. Is either a + WorkspaceConnectionPropertiesV2BasicResource type or a IO type. Default value is None. + :type body: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.AsyncLROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = await self._test_connection_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling + if cont_token: + return AsyncLROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + begin_test_connection.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/testconnection" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_workspace_features_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_workspace_features_operations.py index b954b17e68a6..36b873a7d0cb 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_workspace_features_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_workspace_features_operations.py @@ -6,92 +6,110 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
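# A short sketch of driving the new `begin_test_connection` long-running
# operation defined above. It assumes an already-constructed async client and
# that the connection operations are exposed as a `workspace_connections`
# attribute, which this diff does not confirm.
from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices


async def check_connection(client: AzureMachineLearningServices) -> None:
    # `body` is optional (defaults to None); the poller is AsyncLROPoller[None],
    # so there is no payload to read, only completion or an HttpResponseError.
    poller = await client.workspace_connections.begin_test_connection(
        resource_group_name="<resource-group>",
        workspace_name="<workspace>",
        connection_name="<connection>",
    )
    await poller.wait()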
# -------------------------------------------------------------------------- +import urllib.parse from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request from ...operations._workspace_features_operations import build_list_request -T = TypeVar('T') + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class WorkspaceFeaturesOperations: - """WorkspaceFeaturesOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class WorkspaceFeaturesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`workspace_features` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> AsyncIterable["_models.ListAmlUserFeatureResult"]: + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncIterable["_models.AmlUserFeature"]: """Lists all enabled features for a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ListAmlUserFeatureResult or the result of - cls(response) + :return: An iterator like instance of either AmlUserFeature or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.ListAmlUserFeatureResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.AmlUserFeature] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ListAmlUserFeatureResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListAmlUserFeatureResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -102,16 +120,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("ListAmlUserFeatureResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -122,8 +139,8 @@ async def get_next(next_link=None): return pipeline_response + return 
AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_workspaces_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_workspaces_operations.py index 3cc904d9d755..369a8d00a870 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_workspaces_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/aio/operations/_workspaces_operations.py @@ -6,96 +6,130 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union +import urllib.parse +from io import IOBase +from typing import IO, Any, AsyncIterable, Callable, Dict, Optional, TypeVar, Union, cast, overload from azure.core.async_paging import AsyncItemPaged, AsyncList -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling from ... 
import models as _models from ..._vendor import _convert_request -from ...operations._workspaces_operations import build_create_or_update_request_initial, build_delete_request_initial, build_diagnose_request_initial, build_get_request, build_list_by_resource_group_request, build_list_by_subscription_request, build_list_keys_request, build_list_notebook_access_token_request, build_list_notebook_keys_request, build_list_outbound_network_dependencies_endpoints_request, build_list_storage_account_keys_request, build_prepare_notebook_request_initial, build_resync_keys_request_initial, build_update_request_initial -T = TypeVar('T') +from ...operations._workspaces_operations import ( + build_create_or_update_request, + build_delete_request, + build_diagnose_request, + build_get_request, + build_list_by_resource_group_request, + build_list_by_subscription_request, + build_list_keys_request, + build_list_notebook_access_token_request, + build_list_notebook_keys_request, + build_list_outbound_network_dependencies_endpoints_request, + build_list_storage_account_keys_request, + build_prepare_notebook_request, + build_resync_keys_request, + build_update_request, +) + +T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] -class WorkspacesOperations: - """WorkspacesOperations async operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class WorkspacesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.aio.AzureMachineLearningServices`'s + :attr:`workspaces` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer) -> None: - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs) -> None: + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list_by_subscription( - self, - skip: Optional[str] = None, - kind: Optional[str] = None, - **kwargs: Any - ) -> AsyncIterable["_models.WorkspaceListResult"]: + self, kind: Optional[str] = None, skip: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.Workspace"]: """Lists all the available machine learning workspaces under the specified subscription. Lists all the available machine learning workspaces under the specified subscription. - :param skip: Continuation token for pagination. - :type skip: str - :param kind: Kind of workspace. + :param kind: Kind of workspace. Default value is None. :type kind: str + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) + :return: An iterator like instance of either Workspace or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_by_subscription_request( subscription_id=self._config.subscription_id, - api_version=api_version, - skip=skip, kind=kind, - template_url=self.list_by_subscription.metadata['url'], + skip=skip, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_by_subscription_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - skip=skip, - kind=kind, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -106,16 +140,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -126,66 +159,75 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - 
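# A brief sketch of consuming the regenerated async pagers. The
# `workspace_features` and `workspaces` attribute names come from the class
# docstrings in this diff; the client import path and construction are
# assumptions to adapt as needed.
from azure.ai.ml._restclient.v2023_08_01_preview.aio import AzureMachineLearningServices


async def list_features_and_workspaces(client: AzureMachineLearningServices) -> None:
    # Each pager follows nextLink and, per the regenerated code, re-applies the
    # client's api-version to the next-link request.
    async for feature in client.workspace_features.list("<resource-group>", "<workspace>"):
        print(feature.id)
    # `kind` and `skip` remain optional (workspace kind filter / continuation token).
    async for workspace in client.workspaces.list_by_subscription():
        print(workspace.name)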
list_by_subscription.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces"} # type: ignore + list_by_subscription.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces" + } @distributed_trace def list_by_resource_group( - self, - resource_group_name: str, - skip: Optional[str] = None, - kind: Optional[str] = None, - **kwargs: Any - ) -> AsyncIterable["_models.WorkspaceListResult"]: + self, resource_group_name: str, kind: Optional[str] = None, skip: Optional[str] = None, **kwargs: Any + ) -> AsyncIterable["_models.Workspace"]: """Lists all the available machine learning workspaces under the specified resource group. Lists all the available machine learning workspaces under the specified resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param skip: Continuation token for pagination. - :type skip: str - :param kind: Kind of workspace. + :param kind: Kind of workspace. Default value is None. :type kind: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) + :return: An iterator like instance of either Workspace or the result of cls(response) :rtype: - ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_by_resource_group_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, - api_version=api_version, - skip=skip, + subscription_id=self._config.subscription_id, kind=kind, - template_url=self.list_by_resource_group.metadata['url'], + skip=skip, + api_version=api_version, + template_url=self.list_by_resource_group.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_by_resource_group_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - api_version=api_version, - skip=skip, - kind=kind, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( 
+ { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -196,16 +238,15 @@ async def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -216,73 +257,80 @@ async def get_next(next_link=None): return pipeline_response + return AsyncItemPaged(get_next, extract_data) - return AsyncItemPaged( - get_next, extract_data - ) - list_by_resource_group.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces"} # type: ignore + list_by_resource_group.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces" + } async def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - force_to_purge: Optional[bool] = False, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, force_to_purge: bool = False, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, force_to_purge=force_to_purge, - template_url=self._delete_initial.metadata['url'], + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, 
stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if cls: - return cls(pipeline_response, None, {}) + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + if cls: + return cls(pipeline_response, None, response_headers) + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } @distributed_trace_async - async def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - force_to_purge: Optional[bool] = False, - **kwargs: Any + async def begin_delete( + self, resource_group_name: str, workspace_name: str, force_to_purge: bool = False, **kwargs: Any ) -> AsyncLROPoller[None]: """Deletes a machine learning workspace. Deletes a machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param force_to_purge: Flag to indicate delete is a purge request. + :param force_to_purge: Flag to indicate delete is a purge request. Default value is False. :type force_to_purge: bool :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -294,90 +342,101 @@ async def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._delete_initial( + raw_result = await self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, force_to_purge=force_to_purge, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } @distributed_trace_async - async def get( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> "_models.Workspace": + async def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: """Gets the properties of the specified machine learning workspace. Gets the properties of the specified machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Workspace, or the result of cls(response) + :return: Workspace or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Workspace - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -385,87 +444,188 @@ async def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } async def _update_initial( self, resource_group_name: str, workspace_name: str, - body: "_models.WorkspaceUpdateParameters", + body: Union[_models.WorkspaceUpdateParameters, IO], **kwargs: Any - ) -> Optional["_models.Workspace"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Workspace"]] + ) -> Optional[_models.Workspace]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + 
error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'WorkspaceUpdateParameters') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.Workspace]] = kwargs.pop("cls", None) - request = build_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "WorkspaceUpdateParameters") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._update_initial.metadata['url'], + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + body: _models.WorkspaceUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Updates a machine learning workspace with the specified parameters. + + Updates a machine learning workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: The parameters for updating a machine learning workspace. Required. 
+ :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_update( + self, + resource_group_name: str, + workspace_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Updates a machine learning workspace with the specified parameters. + + Updates a machine learning workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: The parameters for updating a machine learning workspace. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_update( self, resource_group_name: str, workspace_name: str, - body: "_models.WorkspaceUpdateParameters", + body: Union[_models.WorkspaceUpdateParameters, IO], **kwargs: Any - ) -> AsyncLROPoller["_models.Workspace"]: + ) -> AsyncLROPoller[_models.Workspace]: """Updates a machine learning workspace with the specified parameters. Updates a machine learning workspace with the specified parameters. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. 
:type workspace_name: str - :param body: The parameters for updating a machine learning workspace. - :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters + :param body: The parameters for updating a machine learning workspace. Is either a + WorkspaceUpdateParameters type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -478,17 +638,17 @@ async def begin_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._update_initial( resource_group_name=resource_group_name, @@ -496,110 +656,133 @@ async def begin_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return 
AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } async def _create_or_update_initial( - self, - resource_group_name: str, - workspace_name: str, - body: "_models.Workspace", - **kwargs: Any - ) -> Optional["_models.Workspace"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Workspace"]] + self, resource_group_name: str, workspace_name: str, body: Union[_models.Workspace, IO], **kwargs: Any + ) -> Optional[_models.Workspace]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'Workspace') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.Workspace]] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Workspace") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('Workspace', 
pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore - + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } - @distributed_trace_async + @overload async def begin_create_or_update( self, resource_group_name: str, workspace_name: str, - body: "_models.Workspace", + body: _models.Workspace, + *, + content_type: str = "application/json", **kwargs: Any - ) -> AsyncLROPoller["_models.Workspace"]: + ) -> AsyncLROPoller[_models.Workspace]: """Creates or updates a workspace with the specified parameters. Creates or updates a workspace with the specified parameters. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param body: The parameters for creating or updating a machine learning workspace. + :param body: The parameters for creating or updating a machine learning workspace. Required. :type body: ~azure.mgmt.machinelearningservices.models.Workspace + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for @@ -612,17 +795,90 @@ async def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + async def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Creates or updates a workspace with the specified parameters. + + Creates or updates a workspace with the specified parameters. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: The parameters for creating or updating a machine learning workspace. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace_async + async def begin_create_or_update( + self, resource_group_name: str, workspace_name: str, body: Union[_models.Workspace, IO], **kwargs: Any + ) -> AsyncLROPoller[_models.Workspace]: + """Creates or updates a workspace with the specified parameters. + + Creates or updates a workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: The parameters for creating or updating a machine learning workspace. Is either a + Workspace type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Workspace or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either Workspace or the result of + cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._create_or_update_initial( resource_group_name=resource_group_name, @@ -630,113 +886,219 @@ async def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } async def _diagnose_initial( self, resource_group_name: str, workspace_name: str, - body: Optional["_models.DiagnoseWorkspaceParameters"] = None, + body: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, **kwargs: Any - ) -> Optional["_models.DiagnoseResponseResult"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.DiagnoseResponseResult"]] + ) -> Optional[_models.DiagnoseResponseResult]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: 
ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.DiagnoseResponseResult]] = kwargs.pop("cls", None) - if body is not None: - _json = self._serialize.body(body, 'DiagnoseWorkspaceParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = None + if body is not None: + _json = self._serialize.body(body, "DiagnoseWorkspaceParameters") + else: + _json = None - request = build_diagnose_request_initial( - subscription_id=self._config.subscription_id, + request = build_diagnose_request( resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._diagnose_initial.metadata['url'], + content=_content, + template_url=self._diagnose_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('DiagnoseResponseResult', pipeline_response) + deserialized = self._deserialize("DiagnoseResponseResult", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _diagnose_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose"} # type: ignore + _diagnose_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose" + } + + @overload + async def begin_diagnose( + self, + 
resource_group_name: str, + workspace_name: str, + body: Optional[_models.DiagnoseWorkspaceParameters] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DiagnoseResponseResult]: + """Diagnose workspace setup issue. + + Diagnose workspace setup issue. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: The parameter of diagnosing workspace health. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of AsyncLROPoller that returns either DiagnoseResponseResult or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.DiagnoseResponseResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + async def begin_diagnose( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> AsyncLROPoller[_models.DiagnoseResponseResult]: + """Diagnose workspace setup issue. + + Diagnose workspace setup issue. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: The parameter of diagnosing workspace health. Default value is None. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for + this operation to not poll, or pass in your own initialized polling object for a personal + polling strategy. + :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of AsyncLROPoller that returns either DiagnoseResponseResult or the result + of cls(response) + :rtype: + ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.DiagnoseResponseResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace_async async def begin_diagnose( self, resource_group_name: str, workspace_name: str, - body: Optional["_models.DiagnoseWorkspaceParameters"] = None, + body: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, **kwargs: Any - ) -> AsyncLROPoller["_models.DiagnoseResponseResult"]: + ) -> AsyncLROPoller[_models.DiagnoseResponseResult]: """Diagnose workspace setup issue. Diagnose workspace setup issue. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param body: The parameter of diagnosing workspace health. - :type body: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters + :param body: The parameter of diagnosing workspace health. Is either a + DiagnoseWorkspaceParameters type or a IO type. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be AsyncARMPolling. 
Pass in False for @@ -749,17 +1111,17 @@ async def begin_diagnose( of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.DiagnoseResponseResult] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.DiagnoseResponseResult"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DiagnoseResponseResult] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._diagnose_initial( resource_group_name=resource_group_name, @@ -767,40 +1129,44 @@ async def begin_diagnose( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('DiagnoseResponseResult', pipeline_response) + deserialized = self._deserialize("DiagnoseResponseResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_diagnose.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose"} # type: ignore + begin_diagnose.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose" + } @distributed_trace_async async def list_keys( - self, - resource_group_name: str, - 
workspace_name: str, - **kwargs: Any - ) -> "_models.ListWorkspaceKeysResult": + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ListWorkspaceKeysResult: """Lists all the keys associated with this workspace. This includes keys for the storage account, app insights and password for container registry. @@ -808,38 +1174,46 @@ async def list_keys( app insights and password for container registry. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListWorkspaceKeysResult, or the result of cls(response) + :return: ListWorkspaceKeysResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ListWorkspaceKeysResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListWorkspaceKeysResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ListWorkspaceKeysResult] = kwargs.pop("cls", None) - request = build_list_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_keys.metadata['url'], + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -847,60 +1221,66 @@ async def list_keys( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response) + deserialized = self._deserialize("ListWorkspaceKeysResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys"} # type: ignore - + list_keys.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys" + } @distributed_trace_async async def list_notebook_access_token( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> "_models.NotebookAccessTokenResult": + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.NotebookAccessTokenResult: """Get Azure Machine Learning Workspace notebook access token. Get Azure Machine Learning Workspace notebook access token. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: NotebookAccessTokenResult, or the result of cls(response) + :return: NotebookAccessTokenResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.NotebookAccessTokenResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.NotebookAccessTokenResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.NotebookAccessTokenResult] = kwargs.pop("cls", None) - request = build_list_notebook_access_token_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_notebook_access_token.metadata['url'], + template_url=self.list_notebook_access_token.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -908,60 +1288,66 @@ async def list_notebook_access_token( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('NotebookAccessTokenResult', pipeline_response) + deserialized = self._deserialize("NotebookAccessTokenResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_notebook_access_token.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken"} # type: ignore - + list_notebook_access_token.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken" + } @distributed_trace_async async def list_notebook_keys( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> "_models.ListNotebookKeysResult": + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ListNotebookKeysResult: """Lists keys of Azure Machine Learning Workspaces notebook. Lists keys of Azure Machine Learning Workspaces notebook. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListNotebookKeysResult, or the result of cls(response) + :return: ListNotebookKeysResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListNotebookKeysResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ListNotebookKeysResult] = kwargs.pop("cls", None) - request = build_list_notebook_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_notebook_keys.metadata['url'], + template_url=self.list_notebook_keys.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -969,60 +1355,66 @@ async def list_notebook_keys( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ListNotebookKeysResult', pipeline_response) + deserialized = self._deserialize("ListNotebookKeysResult", pipeline_response) if cls: return 
cls(pipeline_response, deserialized, {}) return deserialized - list_notebook_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys"} # type: ignore - + list_notebook_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys" + } @distributed_trace_async async def list_storage_account_keys( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> "_models.ListStorageAccountKeysResult": + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ListStorageAccountKeysResult: """Lists keys of Azure Machine Learning Workspace's storage account. Lists keys of Azure Machine Learning Workspace's storage account. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListStorageAccountKeysResult, or the result of cls(response) + :return: ListStorageAccountKeysResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ListStorageAccountKeysResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListStorageAccountKeysResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ListStorageAccountKeysResult] = kwargs.pop("cls", None) - request = build_list_storage_account_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_storage_account_keys.metadata['url'], + template_url=self.list_storage_account_keys.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1030,23 +1422,21 @@ async def list_storage_account_keys( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = 
self._deserialize('ListStorageAccountKeysResult', pipeline_response) + deserialized = self._deserialize("ListStorageAccountKeysResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_storage_account_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys"} # type: ignore - + list_storage_account_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys" + } @distributed_trace_async async def list_outbound_network_dependencies_endpoints( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> "_models.ExternalFQDNResponse": + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ExternalFQDNResponse: """Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) programmatically. @@ -1054,38 +1444,46 @@ async def list_outbound_network_dependencies_endpoints( programmatically. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExternalFQDNResponse, or the result of cls(response) + :return: ExternalFQDNResponse or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ExternalFQDNResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ExternalFQDNResponse"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ExternalFQDNResponse] = kwargs.pop("cls", None) - request = build_list_outbound_network_dependencies_endpoints_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_outbound_network_dependencies_endpoints.metadata['url'], + template_url=self.list_outbound_network_dependencies_endpoints.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response 
if response.status_code not in [200]: @@ -1093,84 +1491,88 @@ async def list_outbound_network_dependencies_endpoints( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ExternalFQDNResponse', pipeline_response) + deserialized = self._deserialize("ExternalFQDNResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_outbound_network_dependencies_endpoints.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints"} # type: ignore - + list_outbound_network_dependencies_endpoints.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints" + } async def _prepare_notebook_initial( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> Optional["_models.NotebookResourceInfo"]: - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.NotebookResourceInfo"]] + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> Optional[_models.NotebookResourceInfo]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_prepare_notebook_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Optional[_models.NotebookResourceInfo]] = kwargs.pop("cls", None) + + request = build_prepare_notebook_request( resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._prepare_notebook_initial.metadata['url'], + template_url=self._prepare_notebook_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) + deserialized = 
self._deserialize("NotebookResourceInfo", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _prepare_notebook_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook"} # type: ignore - + _prepare_notebook_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook" + } @distributed_trace_async async def begin_prepare_notebook( - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any - ) -> AsyncLROPoller["_models.NotebookResourceInfo"]: + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> AsyncLROPoller[_models.NotebookResourceInfo]: """Prepare Azure Machine Learning Workspace's notebook resource. Prepare Azure Machine Learning Workspace's notebook resource. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
@@ -1184,102 +1586,111 @@ async def begin_prepare_notebook( of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.machinelearningservices.models.NotebookResourceInfo] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.NotebookResourceInfo"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.NotebookResourceInfo] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = await self._prepare_notebook_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) + deserialized = self._deserialize("NotebookResourceInfo", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_prepare_notebook.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook"} # type: ignore + begin_prepare_notebook.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook" + } async def _resync_keys_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any + self, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> None: - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: 
ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_resync_keys_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_resync_keys_request( resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._resync_keys_initial.metadata['url'], + template_url=self._resync_keys_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = await self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _resync_keys_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys"} # type: ignore - + _resync_keys_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys" + } @distributed_trace_async - async def begin_resync_keys( # pylint: disable=inconsistent-return-statements - self, - resource_group_name: str, - workspace_name: str, - **kwargs: Any + async def begin_resync_keys( + self, resource_group_name: str, workspace_name: str, **kwargs: Any ) -> AsyncLROPoller[None]: """Resync all the keys associated with this workspace.This includes keys for the storage account, app insights and password for container registry. @@ -1288,8 +1699,9 @@ async def begin_resync_keys( # pylint: disable=inconsistent-return-statements app insights and password for container registry. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -1301,41 +1713,49 @@ async def begin_resync_keys( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of AsyncLROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.AsyncLROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = await self._resync_keys_initial( + raw_result = await self._resync_keys_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = AsyncNoPolling() - else: polling_method = polling + if polling is True: + polling_method: AsyncPollingMethod = cast( + AsyncPollingMethod, AsyncARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(AsyncPollingMethod, AsyncNoPolling()) + else: + polling_method = polling if cont_token: return AsyncLROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) + return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_resync_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys"} # type: ignore + begin_resync_keys.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/__init__.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/__init__.py index 222eb55c134a..1c44e5859290 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/__init__.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/__init__.py @@ -6,1164 +6,8 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -try: - from ._models_py3 import AKS - from ._models_py3 import AKSSchema - from ._models_py3 import AKSSchemaProperties - from ._models_py3 import AccessKeyAuthTypeWorkspaceConnectionProperties - from ._models_py3 import AccountKeyDatastoreCredentials - from ._models_py3 import AccountKeyDatastoreSecrets - from ._models_py3 import AcrDetails - from ._models_py3 import AksComputeSecrets - from ._models_py3 import AksComputeSecretsProperties - from ._models_py3 import AksNetworkingConfiguration - from ._models_py3 import AllFeatures - from ._models_py3 import AllNodes - from ._models_py3 import AmlCompute - from ._models_py3 import AmlComputeNodeInformation - from ._models_py3 import AmlComputeNodesInformation - from ._models_py3 import AmlComputeProperties - from ._models_py3 import AmlComputeSchema - from ._models_py3 import AmlOperation - from ._models_py3 import AmlOperationListResult - from ._models_py3 import AmlToken - from ._models_py3 import AmlTokenComputeIdentity - from ._models_py3 import AmlUserFeature - from ._models_py3 import ApiKeyAuthWorkspaceConnectionProperties - from ._models_py3 import ArmResourceId - from ._models_py3 import AssetBase - from ._models_py3 import AssetContainer - from ._models_py3 import AssetJobInput - from ._models_py3 import AssetJobOutput - from ._models_py3 import AssetReferenceBase - from ._models_py3 import AssignedUser - from ._models_py3 import AutoDeleteSetting - from ._models_py3 import AutoForecastHorizon - from ._models_py3 import AutoMLJob - from ._models_py3 import AutoMLVertical - from ._models_py3 import AutoNCrossValidations - from ._models_py3 import AutoPauseProperties - from ._models_py3 import AutoScaleProperties - from ._models_py3 import AutoSeasonality - from ._models_py3 import AutoTargetLags - from ._models_py3 import AutoTargetRollingWindowSize - from ._models_py3 import AutologgerSettings - from ._models_py3 import AzMonMonitoringAlertNotificationSettings - from ._models_py3 import AzureBlobDatastore - from ._models_py3 import AzureDataLakeGen1Datastore - from ._models_py3 import AzureDataLakeGen2Datastore - from ._models_py3 import AzureDatastore - from ._models_py3 import AzureDevOpsWebhook - from ._models_py3 import AzureFileDatastore - from ._models_py3 import AzureMLBatchInferencingServer - from ._models_py3 import AzureMLOnlineInferencingServer - from ._models_py3 import BanditPolicy - from ._models_py3 import BaseEnvironmentId - from ._models_py3 import BaseEnvironmentSource - from ._models_py3 import BatchDeployment - from ._models_py3 import BatchDeploymentConfiguration - from ._models_py3 import BatchDeploymentProperties - from ._models_py3 import BatchDeploymentTrackedResourceArmPaginatedResult - from ._models_py3 import BatchEndpoint - from ._models_py3 import BatchEndpointDefaults - from 
._models_py3 import BatchEndpointProperties - from ._models_py3 import BatchEndpointTrackedResourceArmPaginatedResult - from ._models_py3 import BatchPipelineComponentDeploymentConfiguration - from ._models_py3 import BatchRetrySettings - from ._models_py3 import BayesianSamplingAlgorithm - from ._models_py3 import BindOptions - from ._models_py3 import BlobReferenceForConsumptionDto - from ._models_py3 import BuildContext - from ._models_py3 import CategoricalDataDriftMetricThreshold - from ._models_py3 import CategoricalDataQualityMetricThreshold - from ._models_py3 import CategoricalPredictionDriftMetricThreshold - from ._models_py3 import CertificateDatastoreCredentials - from ._models_py3 import CertificateDatastoreSecrets - from ._models_py3 import Classification - from ._models_py3 import ClassificationModelPerformanceMetricThreshold - from ._models_py3 import ClassificationTrainingSettings - from ._models_py3 import ClusterUpdateParameters - from ._models_py3 import CocoExportSummary - from ._models_py3 import CodeConfiguration - from ._models_py3 import CodeContainer - from ._models_py3 import CodeContainerProperties - from ._models_py3 import CodeContainerResourceArmPaginatedResult - from ._models_py3 import CodeVersion - from ._models_py3 import CodeVersionProperties - from ._models_py3 import CodeVersionResourceArmPaginatedResult - from ._models_py3 import Collection - from ._models_py3 import ColumnTransformer - from ._models_py3 import CommandJob - from ._models_py3 import CommandJobLimits - from ._models_py3 import ComponentConfiguration - from ._models_py3 import ComponentContainer - from ._models_py3 import ComponentContainerProperties - from ._models_py3 import ComponentContainerResourceArmPaginatedResult - from ._models_py3 import ComponentVersion - from ._models_py3 import ComponentVersionProperties - from ._models_py3 import ComponentVersionResourceArmPaginatedResult - from ._models_py3 import Compute - from ._models_py3 import ComputeInstance - from ._models_py3 import ComputeInstanceApplication - from ._models_py3 import ComputeInstanceAutologgerSettings - from ._models_py3 import ComputeInstanceConnectivityEndpoints - from ._models_py3 import ComputeInstanceContainer - from ._models_py3 import ComputeInstanceCreatedBy - from ._models_py3 import ComputeInstanceDataDisk - from ._models_py3 import ComputeInstanceDataMount - from ._models_py3 import ComputeInstanceEnvironmentInfo - from ._models_py3 import ComputeInstanceLastOperation - from ._models_py3 import ComputeInstanceProperties - from ._models_py3 import ComputeInstanceSchema - from ._models_py3 import ComputeInstanceSshSettings - from ._models_py3 import ComputeInstanceVersion - from ._models_py3 import ComputeResource - from ._models_py3 import ComputeResourceSchema - from ._models_py3 import ComputeRuntimeDto - from ._models_py3 import ComputeSchedules - from ._models_py3 import ComputeSecrets - from ._models_py3 import ComputeStartStopSchedule - from ._models_py3 import ContainerResourceRequirements - from ._models_py3 import ContainerResourceSettings - from ._models_py3 import CosmosDbSettings - from ._models_py3 import CreateMonitorAction - from ._models_py3 import Cron - from ._models_py3 import CronTrigger - from ._models_py3 import CsvExportSummary - from ._models_py3 import CustomForecastHorizon - from ._models_py3 import CustomInferencingServer - from ._models_py3 import CustomKeys - from ._models_py3 import CustomKeysWorkspaceConnectionProperties - from ._models_py3 import CustomMetricThreshold - 
from ._models_py3 import CustomModelJobInput - from ._models_py3 import CustomModelJobOutput - from ._models_py3 import CustomMonitoringSignal - from ._models_py3 import CustomNCrossValidations - from ._models_py3 import CustomSeasonality - from ._models_py3 import CustomService - from ._models_py3 import CustomTargetLags - from ._models_py3 import CustomTargetRollingWindowSize - from ._models_py3 import DataCollector - from ._models_py3 import DataContainer - from ._models_py3 import DataContainerProperties - from ._models_py3 import DataContainerResourceArmPaginatedResult - from ._models_py3 import DataDriftMetricThresholdBase - from ._models_py3 import DataDriftMonitoringSignal - from ._models_py3 import DataFactory - from ._models_py3 import DataImport - from ._models_py3 import DataImportSource - from ._models_py3 import DataLakeAnalytics - from ._models_py3 import DataLakeAnalyticsSchema - from ._models_py3 import DataLakeAnalyticsSchemaProperties - from ._models_py3 import DataPathAssetReference - from ._models_py3 import DataQualityMetricThresholdBase - from ._models_py3 import DataQualityMonitoringSignal - from ._models_py3 import DataVersionBase - from ._models_py3 import DataVersionBaseProperties - from ._models_py3 import DataVersionBaseResourceArmPaginatedResult - from ._models_py3 import DatabaseSource - from ._models_py3 import Databricks - from ._models_py3 import DatabricksComputeSecrets - from ._models_py3 import DatabricksComputeSecretsProperties - from ._models_py3 import DatabricksProperties - from ._models_py3 import DatabricksSchema - from ._models_py3 import DatasetExportSummary - from ._models_py3 import Datastore - from ._models_py3 import DatastoreCredentials - from ._models_py3 import DatastoreProperties - from ._models_py3 import DatastoreResourceArmPaginatedResult - from ._models_py3 import DatastoreSecrets - from ._models_py3 import DefaultScaleSettings - from ._models_py3 import DeploymentLogs - from ._models_py3 import DeploymentLogsRequest - from ._models_py3 import DeploymentResourceConfiguration - from ._models_py3 import DiagnoseRequestProperties - from ._models_py3 import DiagnoseResponseResult - from ._models_py3 import DiagnoseResponseResultValue - from ._models_py3 import DiagnoseResult - from ._models_py3 import DiagnoseWorkspaceParameters - from ._models_py3 import DistributionConfiguration - from ._models_py3 import Docker - from ._models_py3 import EarlyTerminationPolicy - from ._models_py3 import EmailMonitoringAlertNotificationSettings - from ._models_py3 import EncryptionKeyVaultUpdateProperties - from ._models_py3 import EncryptionProperty - from ._models_py3 import EncryptionUpdateProperties - from ._models_py3 import Endpoint - from ._models_py3 import EndpointAuthKeys - from ._models_py3 import EndpointAuthToken - from ._models_py3 import EndpointDeploymentPropertiesBase - from ._models_py3 import EndpointPropertiesBase - from ._models_py3 import EndpointScheduleAction - from ._models_py3 import EnvironmentContainer - from ._models_py3 import EnvironmentContainerProperties - from ._models_py3 import EnvironmentContainerResourceArmPaginatedResult - from ._models_py3 import EnvironmentVariable - from ._models_py3 import EnvironmentVersion - from ._models_py3 import EnvironmentVersionProperties - from ._models_py3 import EnvironmentVersionResourceArmPaginatedResult - from ._models_py3 import ErrorAdditionalInfo - from ._models_py3 import ErrorDetail - from ._models_py3 import ErrorResponse - from ._models_py3 import EstimatedVMPrice - from 
._models_py3 import EstimatedVMPrices - from ._models_py3 import ExportSummary - from ._models_py3 import ExternalFQDNResponse - from ._models_py3 import FQDNEndpoint - from ._models_py3 import FQDNEndpointDetail - from ._models_py3 import FQDNEndpoints - from ._models_py3 import FQDNEndpointsPropertyBag - from ._models_py3 import Feature - from ._models_py3 import FeatureAttributionDriftMonitoringSignal - from ._models_py3 import FeatureAttributionMetricThreshold - from ._models_py3 import FeatureProperties - from ._models_py3 import FeatureResourceArmPaginatedResult - from ._models_py3 import FeatureStoreSettings - from ._models_py3 import FeatureSubset - from ._models_py3 import FeatureWindow - from ._models_py3 import FeaturesetContainer - from ._models_py3 import FeaturesetContainerProperties - from ._models_py3 import FeaturesetContainerResourceArmPaginatedResult - from ._models_py3 import FeaturesetSpecification - from ._models_py3 import FeaturesetVersion - from ._models_py3 import FeaturesetVersionBackfillRequest - from ._models_py3 import FeaturesetVersionBackfillResponse - from ._models_py3 import FeaturesetVersionProperties - from ._models_py3 import FeaturesetVersionResourceArmPaginatedResult - from ._models_py3 import FeaturestoreEntityContainer - from ._models_py3 import FeaturestoreEntityContainerProperties - from ._models_py3 import FeaturestoreEntityContainerResourceArmPaginatedResult - from ._models_py3 import FeaturestoreEntityVersion - from ._models_py3 import FeaturestoreEntityVersionProperties - from ._models_py3 import FeaturestoreEntityVersionResourceArmPaginatedResult - from ._models_py3 import FeaturizationSettings - from ._models_py3 import FileSystemSource - from ._models_py3 import FixedInputData - from ._models_py3 import FlavorData - from ._models_py3 import ForecastHorizon - from ._models_py3 import Forecasting - from ._models_py3 import ForecastingSettings - from ._models_py3 import ForecastingTrainingSettings - from ._models_py3 import FqdnOutboundRule - from ._models_py3 import GenerationSafetyQualityMetricThreshold - from ._models_py3 import GenerationSafetyQualityMonitoringSignal - from ._models_py3 import GenerationTokenStatisticsMetricThreshold - from ._models_py3 import GenerationTokenStatisticsSignal - from ._models_py3 import GridSamplingAlgorithm - from ._models_py3 import HDInsight - from ._models_py3 import HDInsightProperties - from ._models_py3 import HDInsightSchema - from ._models_py3 import HdfsDatastore - from ._models_py3 import IdAssetReference - from ._models_py3 import IdentityConfiguration - from ._models_py3 import IdentityForCmk - from ._models_py3 import IdleShutdownSetting - from ._models_py3 import Image - from ._models_py3 import ImageClassification - from ._models_py3 import ImageClassificationBase - from ._models_py3 import ImageClassificationMultilabel - from ._models_py3 import ImageInstanceSegmentation - from ._models_py3 import ImageLimitSettings - from ._models_py3 import ImageMetadata - from ._models_py3 import ImageModelDistributionSettings - from ._models_py3 import ImageModelDistributionSettingsClassification - from ._models_py3 import ImageModelDistributionSettingsObjectDetection - from ._models_py3 import ImageModelSettings - from ._models_py3 import ImageModelSettingsClassification - from ._models_py3 import ImageModelSettingsObjectDetection - from ._models_py3 import ImageObjectDetection - from ._models_py3 import ImageObjectDetectionBase - from ._models_py3 import ImageSweepSettings - from ._models_py3 import 
ImageVertical - from ._models_py3 import ImportDataAction - from ._models_py3 import IndexColumn - from ._models_py3 import InferenceContainerProperties - from ._models_py3 import InferencingServer - from ._models_py3 import InstanceTypeSchema - from ._models_py3 import InstanceTypeSchemaResources - from ._models_py3 import IntellectualProperty - from ._models_py3 import JobBase - from ._models_py3 import JobBaseProperties - from ._models_py3 import JobBaseResourceArmPaginatedResult - from ._models_py3 import JobInput - from ._models_py3 import JobLimits - from ._models_py3 import JobOutput - from ._models_py3 import JobResourceConfiguration - from ._models_py3 import JobScheduleAction - from ._models_py3 import JobService - from ._models_py3 import KerberosCredentials - from ._models_py3 import KerberosKeytabCredentials - from ._models_py3 import KerberosKeytabSecrets - from ._models_py3 import KerberosPasswordCredentials - from ._models_py3 import KerberosPasswordSecrets - from ._models_py3 import KeyVaultProperties - from ._models_py3 import Kubernetes - from ._models_py3 import KubernetesOnlineDeployment - from ._models_py3 import KubernetesProperties - from ._models_py3 import KubernetesSchema - from ._models_py3 import LabelCategory - from ._models_py3 import LabelClass - from ._models_py3 import LabelingDataConfiguration - from ._models_py3 import LabelingJob - from ._models_py3 import LabelingJobImageProperties - from ._models_py3 import LabelingJobInstructions - from ._models_py3 import LabelingJobMediaProperties - from ._models_py3 import LabelingJobProperties - from ._models_py3 import LabelingJobResourceArmPaginatedResult - from ._models_py3 import LabelingJobTextProperties - from ._models_py3 import LakeHouseArtifact - from ._models_py3 import ListAmlUserFeatureResult - from ._models_py3 import ListNotebookKeysResult - from ._models_py3 import ListStorageAccountKeysResult - from ._models_py3 import ListUsagesResult - from ._models_py3 import ListWorkspaceKeysResult - from ._models_py3 import ListWorkspaceQuotas - from ._models_py3 import LiteralJobInput - from ._models_py3 import MLAssistConfiguration - from ._models_py3 import MLAssistConfigurationDisabled - from ._models_py3 import MLAssistConfigurationEnabled - from ._models_py3 import MLFlowModelJobInput - from ._models_py3 import MLFlowModelJobOutput - from ._models_py3 import MLTableData - from ._models_py3 import MLTableJobInput - from ._models_py3 import MLTableJobOutput - from ._models_py3 import ManagedComputeIdentity - from ._models_py3 import ManagedIdentity - from ._models_py3 import ManagedIdentityAuthTypeWorkspaceConnectionProperties - from ._models_py3 import ManagedNetworkProvisionOptions - from ._models_py3 import ManagedNetworkProvisionStatus - from ._models_py3 import ManagedNetworkSettings - from ._models_py3 import ManagedOnlineDeployment - from ._models_py3 import ManagedServiceIdentity - from ._models_py3 import MaterializationComputeResource - from ._models_py3 import MaterializationSettings - from ._models_py3 import MedianStoppingPolicy - from ._models_py3 import ModelConfiguration - from ._models_py3 import ModelContainer - from ._models_py3 import ModelContainerProperties - from ._models_py3 import ModelContainerResourceArmPaginatedResult - from ._models_py3 import ModelPackageInput - from ._models_py3 import ModelPerformanceMetricThresholdBase - from ._models_py3 import ModelPerformanceSignal - from ._models_py3 import ModelProfile - from ._models_py3 import ModelVersion - from ._models_py3 import 
ModelVersionProperties - from ._models_py3 import ModelVersionResourceArmPaginatedResult - from ._models_py3 import MonitorComputeConfigurationBase - from ._models_py3 import MonitorComputeIdentityBase - from ._models_py3 import MonitorDefinition - from ._models_py3 import MonitorServerlessSparkCompute - from ._models_py3 import MonitoringAlertNotificationSettingsBase - from ._models_py3 import MonitoringDataSegment - from ._models_py3 import MonitoringFeatureFilterBase - from ._models_py3 import MonitoringInputDataBase - from ._models_py3 import MonitoringSignalBase - from ._models_py3 import MonitoringTarget - from ._models_py3 import MonitoringThreshold - from ._models_py3 import MonitoringWorkspaceConnection - from ._models_py3 import Mpi - from ._models_py3 import NCrossValidations - from ._models_py3 import NlpFixedParameters - from ._models_py3 import NlpParameterSubspace - from ._models_py3 import NlpSweepSettings - from ._models_py3 import NlpVertical - from ._models_py3 import NlpVerticalFeaturizationSettings - from ._models_py3 import NlpVerticalLimitSettings - from ._models_py3 import NodeStateCounts - from ._models_py3 import Nodes - from ._models_py3 import NoneAuthTypeWorkspaceConnectionProperties - from ._models_py3 import NoneDatastoreCredentials - from ._models_py3 import NotebookAccessTokenResult - from ._models_py3 import NotebookPreparationError - from ._models_py3 import NotebookResourceInfo - from ._models_py3 import NotificationSetting - from ._models_py3 import NumericalDataDriftMetricThreshold - from ._models_py3 import NumericalDataQualityMetricThreshold - from ._models_py3 import NumericalPredictionDriftMetricThreshold - from ._models_py3 import Objective - from ._models_py3 import OneLakeArtifact - from ._models_py3 import OneLakeDatastore - from ._models_py3 import OnlineDeployment - from ._models_py3 import OnlineDeploymentProperties - from ._models_py3 import OnlineDeploymentTrackedResourceArmPaginatedResult - from ._models_py3 import OnlineEndpoint - from ._models_py3 import OnlineEndpointProperties - from ._models_py3 import OnlineEndpointTrackedResourceArmPaginatedResult - from ._models_py3 import OnlineInferenceConfiguration - from ._models_py3 import OnlineRequestSettings - from ._models_py3 import OnlineScaleSettings - from ._models_py3 import OperationDisplay - from ._models_py3 import OsPatchingStatus - from ._models_py3 import OutboundRule - from ._models_py3 import OutboundRuleBasicResource - from ._models_py3 import OutboundRuleListResult - from ._models_py3 import OutputPathAssetReference - from ._models_py3 import PATAuthTypeWorkspaceConnectionProperties - from ._models_py3 import PackageInputPathBase - from ._models_py3 import PackageInputPathId - from ._models_py3 import PackageInputPathUrl - from ._models_py3 import PackageInputPathVersion - from ._models_py3 import PackageRequest - from ._models_py3 import PackageResponse - from ._models_py3 import PaginatedComputeResourcesList - from ._models_py3 import PartialBatchDeployment - from ._models_py3 import PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties - from ._models_py3 import PartialJobBase - from ._models_py3 import PartialJobBasePartialResource - from ._models_py3 import PartialManagedServiceIdentity - from ._models_py3 import PartialMinimalTrackedResource - from ._models_py3 import PartialMinimalTrackedResourceWithIdentity - from ._models_py3 import PartialMinimalTrackedResourceWithSku - from ._models_py3 import PartialMinimalTrackedResourceWithSkuAndIdentity - from 
._models_py3 import PartialNotificationSetting - from ._models_py3 import PartialRegistryPartialTrackedResource - from ._models_py3 import PartialSku - from ._models_py3 import Password - from ._models_py3 import PendingUploadCredentialDto - from ._models_py3 import PendingUploadRequestDto - from ._models_py3 import PendingUploadResponseDto - from ._models_py3 import PersonalComputeInstanceSettings - from ._models_py3 import PipelineJob - from ._models_py3 import PredictionDriftMetricThresholdBase - from ._models_py3 import PredictionDriftMonitoringSignal - from ._models_py3 import PrivateEndpoint - from ._models_py3 import PrivateEndpointConnection - from ._models_py3 import PrivateEndpointConnectionListResult - from ._models_py3 import PrivateEndpointDestination - from ._models_py3 import PrivateEndpointOutboundRule - from ._models_py3 import PrivateEndpointResource - from ._models_py3 import PrivateLinkResource - from ._models_py3 import PrivateLinkResourceListResult - from ._models_py3 import PrivateLinkServiceConnectionState - from ._models_py3 import ProbeSettings - from ._models_py3 import ProgressMetrics - from ._models_py3 import PyTorch - from ._models_py3 import QueueSettings - from ._models_py3 import QuotaBaseProperties - from ._models_py3 import QuotaUpdateParameters - from ._models_py3 import RandomSamplingAlgorithm - from ._models_py3 import Ray - from ._models_py3 import Recurrence - from ._models_py3 import RecurrenceSchedule - from ._models_py3 import RecurrenceTrigger - from ._models_py3 import RegenerateEndpointKeysRequest - from ._models_py3 import Registry - from ._models_py3 import RegistryListCredentialsResult - from ._models_py3 import RegistryPartialManagedServiceIdentity - from ._models_py3 import RegistryPrivateEndpointConnection - from ._models_py3 import RegistryPrivateLinkServiceConnectionState - from ._models_py3 import RegistryRegionArmDetails - from ._models_py3 import RegistryTrackedResourceArmPaginatedResult - from ._models_py3 import Regression - from ._models_py3 import RegressionModelPerformanceMetricThreshold - from ._models_py3 import RegressionTrainingSettings - from ._models_py3 import RequestLogging - from ._models_py3 import ResizeSchema - from ._models_py3 import Resource - from ._models_py3 import ResourceBase - from ._models_py3 import ResourceConfiguration - from ._models_py3 import ResourceId - from ._models_py3 import ResourceName - from ._models_py3 import ResourceQuota - from ._models_py3 import Route - from ._models_py3 import SASAuthTypeWorkspaceConnectionProperties - from ._models_py3 import SASCredentialDto - from ._models_py3 import SamplingAlgorithm - from ._models_py3 import SasDatastoreCredentials - from ._models_py3 import SasDatastoreSecrets - from ._models_py3 import ScaleSettings - from ._models_py3 import ScaleSettingsInformation - from ._models_py3 import Schedule - from ._models_py3 import ScheduleActionBase - from ._models_py3 import ScheduleBase - from ._models_py3 import ScheduleProperties - from ._models_py3 import ScheduleResourceArmPaginatedResult - from ._models_py3 import ScriptReference - from ._models_py3 import ScriptsToExecute - from ._models_py3 import Seasonality - from ._models_py3 import SecretConfiguration - from ._models_py3 import ServerlessEndpoint - from ._models_py3 import ServerlessEndpointProperties - from ._models_py3 import ServerlessEndpointTrackedResourceArmPaginatedResult - from ._models_py3 import ServiceManagedResourcesSettings - from ._models_py3 import 
ServicePrincipalAuthTypeWorkspaceConnectionProperties - from ._models_py3 import ServicePrincipalDatastoreCredentials - from ._models_py3 import ServicePrincipalDatastoreSecrets - from ._models_py3 import ServiceTagDestination - from ._models_py3 import ServiceTagOutboundRule - from ._models_py3 import SetupScripts - from ._models_py3 import SharedPrivateLinkResource - from ._models_py3 import Sku - from ._models_py3 import SkuCapacity - from ._models_py3 import SkuResource - from ._models_py3 import SkuResourceArmPaginatedResult - from ._models_py3 import SkuSetting - from ._models_py3 import SparkJob - from ._models_py3 import SparkJobEntry - from ._models_py3 import SparkJobPythonEntry - from ._models_py3 import SparkJobScalaEntry - from ._models_py3 import SparkResourceConfiguration - from ._models_py3 import SslConfiguration - from ._models_py3 import StackEnsembleSettings - from ._models_py3 import StaticInputData - from ._models_py3 import StatusMessage - from ._models_py3 import StorageAccountDetails - from ._models_py3 import SweepJob - from ._models_py3 import SweepJobLimits - from ._models_py3 import SynapseSpark - from ._models_py3 import SynapseSparkProperties - from ._models_py3 import SystemCreatedAcrAccount - from ._models_py3 import SystemCreatedStorageAccount - from ._models_py3 import SystemData - from ._models_py3 import SystemService - from ._models_py3 import TableFixedParameters - from ._models_py3 import TableParameterSubspace - from ._models_py3 import TableSweepSettings - from ._models_py3 import TableVertical - from ._models_py3 import TableVerticalFeaturizationSettings - from ._models_py3 import TableVerticalLimitSettings - from ._models_py3 import TargetLags - from ._models_py3 import TargetRollingWindowSize - from ._models_py3 import TargetUtilizationScaleSettings - from ._models_py3 import TensorFlow - from ._models_py3 import TextClassification - from ._models_py3 import TextClassificationMultilabel - from ._models_py3 import TextNer - from ._models_py3 import TmpfsOptions - from ._models_py3 import TopNFeaturesByAttribution - from ._models_py3 import TrackedResource - from ._models_py3 import TrailingInputData - from ._models_py3 import TrainingSettings - from ._models_py3 import TrialComponent - from ._models_py3 import TriggerBase - from ._models_py3 import TritonInferencingServer - from ._models_py3 import TritonModelJobInput - from ._models_py3 import TritonModelJobOutput - from ._models_py3 import TruncationSelectionPolicy - from ._models_py3 import UpdateWorkspaceQuotas - from ._models_py3 import UpdateWorkspaceQuotasResult - from ._models_py3 import UriFileDataVersion - from ._models_py3 import UriFileJobInput - from ._models_py3 import UriFileJobOutput - from ._models_py3 import UriFolderDataVersion - from ._models_py3 import UriFolderJobInput - from ._models_py3 import UriFolderJobOutput - from ._models_py3 import Usage - from ._models_py3 import UsageName - from ._models_py3 import UserAccountCredentials - from ._models_py3 import UserAssignedIdentity - from ._models_py3 import UserCreatedAcrAccount - from ._models_py3 import UserCreatedStorageAccount - from ._models_py3 import UserIdentity - from ._models_py3 import UsernamePasswordAuthTypeWorkspaceConnectionProperties - from ._models_py3 import VirtualMachine - from ._models_py3 import VirtualMachineImage - from ._models_py3 import VirtualMachineSchema - from ._models_py3 import VirtualMachineSchemaProperties - from ._models_py3 import VirtualMachineSecrets - from ._models_py3 import 
VirtualMachineSecretsSchema - from ._models_py3 import VirtualMachineSize - from ._models_py3 import VirtualMachineSizeListResult - from ._models_py3 import VirtualMachineSshCredentials - from ._models_py3 import VolumeDefinition - from ._models_py3 import VolumeOptions - from ._models_py3 import Webhook - from ._models_py3 import Workspace - from ._models_py3 import WorkspaceConnectionAccessKey - from ._models_py3 import WorkspaceConnectionApiKey - from ._models_py3 import WorkspaceConnectionManagedIdentity - from ._models_py3 import WorkspaceConnectionPersonalAccessToken - from ._models_py3 import WorkspaceConnectionPropertiesV2 - from ._models_py3 import WorkspaceConnectionPropertiesV2BasicResource - from ._models_py3 import WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult - from ._models_py3 import WorkspaceConnectionServicePrincipal - from ._models_py3 import WorkspaceConnectionSharedAccessSignature - from ._models_py3 import WorkspaceConnectionUpdateParameter - from ._models_py3 import WorkspaceConnectionUsernamePassword - from ._models_py3 import WorkspaceHubConfig - from ._models_py3 import WorkspaceListResult - from ._models_py3 import WorkspacePrivateEndpointResource - from ._models_py3 import WorkspaceUpdateParameters -except (SyntaxError, ImportError): - from ._models import AKS # type: ignore - from ._models import AKSSchema # type: ignore - from ._models import AKSSchemaProperties # type: ignore - from ._models import AccessKeyAuthTypeWorkspaceConnectionProperties # type: ignore - from ._models import AccountKeyDatastoreCredentials # type: ignore - from ._models import AccountKeyDatastoreSecrets # type: ignore - from ._models import AcrDetails # type: ignore - from ._models import AksComputeSecrets # type: ignore - from ._models import AksComputeSecretsProperties # type: ignore - from ._models import AksNetworkingConfiguration # type: ignore - from ._models import AllFeatures # type: ignore - from ._models import AllNodes # type: ignore - from ._models import AmlCompute # type: ignore - from ._models import AmlComputeNodeInformation # type: ignore - from ._models import AmlComputeNodesInformation # type: ignore - from ._models import AmlComputeProperties # type: ignore - from ._models import AmlComputeSchema # type: ignore - from ._models import AmlOperation # type: ignore - from ._models import AmlOperationListResult # type: ignore - from ._models import AmlToken # type: ignore - from ._models import AmlTokenComputeIdentity # type: ignore - from ._models import AmlUserFeature # type: ignore - from ._models import ApiKeyAuthWorkspaceConnectionProperties # type: ignore - from ._models import ArmResourceId # type: ignore - from ._models import AssetBase # type: ignore - from ._models import AssetContainer # type: ignore - from ._models import AssetJobInput # type: ignore - from ._models import AssetJobOutput # type: ignore - from ._models import AssetReferenceBase # type: ignore - from ._models import AssignedUser # type: ignore - from ._models import AutoDeleteSetting # type: ignore - from ._models import AutoForecastHorizon # type: ignore - from ._models import AutoMLJob # type: ignore - from ._models import AutoMLVertical # type: ignore - from ._models import AutoNCrossValidations # type: ignore - from ._models import AutoPauseProperties # type: ignore - from ._models import AutoScaleProperties # type: ignore - from ._models import AutoSeasonality # type: ignore - from ._models import AutoTargetLags # type: ignore - from ._models import 
AutoTargetRollingWindowSize # type: ignore - from ._models import AutologgerSettings # type: ignore - from ._models import AzMonMonitoringAlertNotificationSettings # type: ignore - from ._models import AzureBlobDatastore # type: ignore - from ._models import AzureDataLakeGen1Datastore # type: ignore - from ._models import AzureDataLakeGen2Datastore # type: ignore - from ._models import AzureDatastore # type: ignore - from ._models import AzureDevOpsWebhook # type: ignore - from ._models import AzureFileDatastore # type: ignore - from ._models import AzureMLBatchInferencingServer # type: ignore - from ._models import AzureMLOnlineInferencingServer # type: ignore - from ._models import BanditPolicy # type: ignore - from ._models import BaseEnvironmentId # type: ignore - from ._models import BaseEnvironmentSource # type: ignore - from ._models import BatchDeployment # type: ignore - from ._models import BatchDeploymentConfiguration # type: ignore - from ._models import BatchDeploymentProperties # type: ignore - from ._models import BatchDeploymentTrackedResourceArmPaginatedResult # type: ignore - from ._models import BatchEndpoint # type: ignore - from ._models import BatchEndpointDefaults # type: ignore - from ._models import BatchEndpointProperties # type: ignore - from ._models import BatchEndpointTrackedResourceArmPaginatedResult # type: ignore - from ._models import BatchPipelineComponentDeploymentConfiguration # type: ignore - from ._models import BatchRetrySettings # type: ignore - from ._models import BayesianSamplingAlgorithm # type: ignore - from ._models import BindOptions # type: ignore - from ._models import BlobReferenceForConsumptionDto # type: ignore - from ._models import BuildContext # type: ignore - from ._models import CategoricalDataDriftMetricThreshold # type: ignore - from ._models import CategoricalDataQualityMetricThreshold # type: ignore - from ._models import CategoricalPredictionDriftMetricThreshold # type: ignore - from ._models import CertificateDatastoreCredentials # type: ignore - from ._models import CertificateDatastoreSecrets # type: ignore - from ._models import Classification # type: ignore - from ._models import ClassificationModelPerformanceMetricThreshold # type: ignore - from ._models import ClassificationTrainingSettings # type: ignore - from ._models import ClusterUpdateParameters # type: ignore - from ._models import CocoExportSummary # type: ignore - from ._models import CodeConfiguration # type: ignore - from ._models import CodeContainer # type: ignore - from ._models import CodeContainerProperties # type: ignore - from ._models import CodeContainerResourceArmPaginatedResult # type: ignore - from ._models import CodeVersion # type: ignore - from ._models import CodeVersionProperties # type: ignore - from ._models import CodeVersionResourceArmPaginatedResult # type: ignore - from ._models import Collection # type: ignore - from ._models import ColumnTransformer # type: ignore - from ._models import CommandJob # type: ignore - from ._models import CommandJobLimits # type: ignore - from ._models import ComponentConfiguration # type: ignore - from ._models import ComponentContainer # type: ignore - from ._models import ComponentContainerProperties # type: ignore - from ._models import ComponentContainerResourceArmPaginatedResult # type: ignore - from ._models import ComponentVersion # type: ignore - from ._models import ComponentVersionProperties # type: ignore - from ._models import ComponentVersionResourceArmPaginatedResult # type: ignore - from 
._models import Compute # type: ignore - from ._models import ComputeInstance # type: ignore - from ._models import ComputeInstanceApplication # type: ignore - from ._models import ComputeInstanceAutologgerSettings # type: ignore - from ._models import ComputeInstanceConnectivityEndpoints # type: ignore - from ._models import ComputeInstanceContainer # type: ignore - from ._models import ComputeInstanceCreatedBy # type: ignore - from ._models import ComputeInstanceDataDisk # type: ignore - from ._models import ComputeInstanceDataMount # type: ignore - from ._models import ComputeInstanceEnvironmentInfo # type: ignore - from ._models import ComputeInstanceLastOperation # type: ignore - from ._models import ComputeInstanceProperties # type: ignore - from ._models import ComputeInstanceSchema # type: ignore - from ._models import ComputeInstanceSshSettings # type: ignore - from ._models import ComputeInstanceVersion # type: ignore - from ._models import ComputeResource # type: ignore - from ._models import ComputeResourceSchema # type: ignore - from ._models import ComputeRuntimeDto # type: ignore - from ._models import ComputeSchedules # type: ignore - from ._models import ComputeSecrets # type: ignore - from ._models import ComputeStartStopSchedule # type: ignore - from ._models import ContainerResourceRequirements # type: ignore - from ._models import ContainerResourceSettings # type: ignore - from ._models import CosmosDbSettings # type: ignore - from ._models import CreateMonitorAction # type: ignore - from ._models import Cron # type: ignore - from ._models import CronTrigger # type: ignore - from ._models import CsvExportSummary # type: ignore - from ._models import CustomForecastHorizon # type: ignore - from ._models import CustomInferencingServer # type: ignore - from ._models import CustomKeys # type: ignore - from ._models import CustomKeysWorkspaceConnectionProperties # type: ignore - from ._models import CustomMetricThreshold # type: ignore - from ._models import CustomModelJobInput # type: ignore - from ._models import CustomModelJobOutput # type: ignore - from ._models import CustomMonitoringSignal # type: ignore - from ._models import CustomNCrossValidations # type: ignore - from ._models import CustomSeasonality # type: ignore - from ._models import CustomService # type: ignore - from ._models import CustomTargetLags # type: ignore - from ._models import CustomTargetRollingWindowSize # type: ignore - from ._models import DataCollector # type: ignore - from ._models import DataContainer # type: ignore - from ._models import DataContainerProperties # type: ignore - from ._models import DataContainerResourceArmPaginatedResult # type: ignore - from ._models import DataDriftMetricThresholdBase # type: ignore - from ._models import DataDriftMonitoringSignal # type: ignore - from ._models import DataFactory # type: ignore - from ._models import DataImport # type: ignore - from ._models import DataImportSource # type: ignore - from ._models import DataLakeAnalytics # type: ignore - from ._models import DataLakeAnalyticsSchema # type: ignore - from ._models import DataLakeAnalyticsSchemaProperties # type: ignore - from ._models import DataPathAssetReference # type: ignore - from ._models import DataQualityMetricThresholdBase # type: ignore - from ._models import DataQualityMonitoringSignal # type: ignore - from ._models import DataVersionBase # type: ignore - from ._models import DataVersionBaseProperties # type: ignore - from ._models import DataVersionBaseResourceArmPaginatedResult 
# type: ignore - from ._models import DatabaseSource # type: ignore - from ._models import Databricks # type: ignore - from ._models import DatabricksComputeSecrets # type: ignore - from ._models import DatabricksComputeSecretsProperties # type: ignore - from ._models import DatabricksProperties # type: ignore - from ._models import DatabricksSchema # type: ignore - from ._models import DatasetExportSummary # type: ignore - from ._models import Datastore # type: ignore - from ._models import DatastoreCredentials # type: ignore - from ._models import DatastoreProperties # type: ignore - from ._models import DatastoreResourceArmPaginatedResult # type: ignore - from ._models import DatastoreSecrets # type: ignore - from ._models import DefaultScaleSettings # type: ignore - from ._models import DeploymentLogs # type: ignore - from ._models import DeploymentLogsRequest # type: ignore - from ._models import DeploymentResourceConfiguration # type: ignore - from ._models import DiagnoseRequestProperties # type: ignore - from ._models import DiagnoseResponseResult # type: ignore - from ._models import DiagnoseResponseResultValue # type: ignore - from ._models import DiagnoseResult # type: ignore - from ._models import DiagnoseWorkspaceParameters # type: ignore - from ._models import DistributionConfiguration # type: ignore - from ._models import Docker # type: ignore - from ._models import EarlyTerminationPolicy # type: ignore - from ._models import EmailMonitoringAlertNotificationSettings # type: ignore - from ._models import EncryptionKeyVaultUpdateProperties # type: ignore - from ._models import EncryptionProperty # type: ignore - from ._models import EncryptionUpdateProperties # type: ignore - from ._models import Endpoint # type: ignore - from ._models import EndpointAuthKeys # type: ignore - from ._models import EndpointAuthToken # type: ignore - from ._models import EndpointDeploymentPropertiesBase # type: ignore - from ._models import EndpointPropertiesBase # type: ignore - from ._models import EndpointScheduleAction # type: ignore - from ._models import EnvironmentContainer # type: ignore - from ._models import EnvironmentContainerProperties # type: ignore - from ._models import EnvironmentContainerResourceArmPaginatedResult # type: ignore - from ._models import EnvironmentVariable # type: ignore - from ._models import EnvironmentVersion # type: ignore - from ._models import EnvironmentVersionProperties # type: ignore - from ._models import EnvironmentVersionResourceArmPaginatedResult # type: ignore - from ._models import ErrorAdditionalInfo # type: ignore - from ._models import ErrorDetail # type: ignore - from ._models import ErrorResponse # type: ignore - from ._models import EstimatedVMPrice # type: ignore - from ._models import EstimatedVMPrices # type: ignore - from ._models import ExportSummary # type: ignore - from ._models import ExternalFQDNResponse # type: ignore - from ._models import FQDNEndpoint # type: ignore - from ._models import FQDNEndpointDetail # type: ignore - from ._models import FQDNEndpoints # type: ignore - from ._models import FQDNEndpointsPropertyBag # type: ignore - from ._models import Feature # type: ignore - from ._models import FeatureAttributionDriftMonitoringSignal # type: ignore - from ._models import FeatureAttributionMetricThreshold # type: ignore - from ._models import FeatureProperties # type: ignore - from ._models import FeatureResourceArmPaginatedResult # type: ignore - from ._models import FeatureStoreSettings # type: ignore - from ._models 
import FeatureSubset # type: ignore - from ._models import FeatureWindow # type: ignore - from ._models import FeaturesetContainer # type: ignore - from ._models import FeaturesetContainerProperties # type: ignore - from ._models import FeaturesetContainerResourceArmPaginatedResult # type: ignore - from ._models import FeaturesetSpecification # type: ignore - from ._models import FeaturesetVersion # type: ignore - from ._models import FeaturesetVersionBackfillRequest # type: ignore - from ._models import FeaturesetVersionBackfillResponse # type: ignore - from ._models import FeaturesetVersionProperties # type: ignore - from ._models import FeaturesetVersionResourceArmPaginatedResult # type: ignore - from ._models import FeaturestoreEntityContainer # type: ignore - from ._models import FeaturestoreEntityContainerProperties # type: ignore - from ._models import FeaturestoreEntityContainerResourceArmPaginatedResult # type: ignore - from ._models import FeaturestoreEntityVersion # type: ignore - from ._models import FeaturestoreEntityVersionProperties # type: ignore - from ._models import FeaturestoreEntityVersionResourceArmPaginatedResult # type: ignore - from ._models import FeaturizationSettings # type: ignore - from ._models import FileSystemSource # type: ignore - from ._models import FixedInputData # type: ignore - from ._models import FlavorData # type: ignore - from ._models import ForecastHorizon # type: ignore - from ._models import Forecasting # type: ignore - from ._models import ForecastingSettings # type: ignore - from ._models import ForecastingTrainingSettings # type: ignore - from ._models import FqdnOutboundRule # type: ignore - from ._models import GenerationSafetyQualityMetricThreshold # type: ignore - from ._models import GenerationSafetyQualityMonitoringSignal # type: ignore - from ._models import GenerationTokenStatisticsMetricThreshold # type: ignore - from ._models import GenerationTokenStatisticsSignal # type: ignore - from ._models import GridSamplingAlgorithm # type: ignore - from ._models import HDInsight # type: ignore - from ._models import HDInsightProperties # type: ignore - from ._models import HDInsightSchema # type: ignore - from ._models import HdfsDatastore # type: ignore - from ._models import IdAssetReference # type: ignore - from ._models import IdentityConfiguration # type: ignore - from ._models import IdentityForCmk # type: ignore - from ._models import IdleShutdownSetting # type: ignore - from ._models import Image # type: ignore - from ._models import ImageClassification # type: ignore - from ._models import ImageClassificationBase # type: ignore - from ._models import ImageClassificationMultilabel # type: ignore - from ._models import ImageInstanceSegmentation # type: ignore - from ._models import ImageLimitSettings # type: ignore - from ._models import ImageMetadata # type: ignore - from ._models import ImageModelDistributionSettings # type: ignore - from ._models import ImageModelDistributionSettingsClassification # type: ignore - from ._models import ImageModelDistributionSettingsObjectDetection # type: ignore - from ._models import ImageModelSettings # type: ignore - from ._models import ImageModelSettingsClassification # type: ignore - from ._models import ImageModelSettingsObjectDetection # type: ignore - from ._models import ImageObjectDetection # type: ignore - from ._models import ImageObjectDetectionBase # type: ignore - from ._models import ImageSweepSettings # type: ignore - from ._models import ImageVertical # type: ignore - from 
._models import ImportDataAction # type: ignore - from ._models import IndexColumn # type: ignore - from ._models import InferenceContainerProperties # type: ignore - from ._models import InferencingServer # type: ignore - from ._models import InstanceTypeSchema # type: ignore - from ._models import InstanceTypeSchemaResources # type: ignore - from ._models import IntellectualProperty # type: ignore - from ._models import JobBase # type: ignore - from ._models import JobBaseProperties # type: ignore - from ._models import JobBaseResourceArmPaginatedResult # type: ignore - from ._models import JobInput # type: ignore - from ._models import JobLimits # type: ignore - from ._models import JobOutput # type: ignore - from ._models import JobResourceConfiguration # type: ignore - from ._models import JobScheduleAction # type: ignore - from ._models import JobService # type: ignore - from ._models import KerberosCredentials # type: ignore - from ._models import KerberosKeytabCredentials # type: ignore - from ._models import KerberosKeytabSecrets # type: ignore - from ._models import KerberosPasswordCredentials # type: ignore - from ._models import KerberosPasswordSecrets # type: ignore - from ._models import KeyVaultProperties # type: ignore - from ._models import Kubernetes # type: ignore - from ._models import KubernetesOnlineDeployment # type: ignore - from ._models import KubernetesProperties # type: ignore - from ._models import KubernetesSchema # type: ignore - from ._models import LabelCategory # type: ignore - from ._models import LabelClass # type: ignore - from ._models import LabelingDataConfiguration # type: ignore - from ._models import LabelingJob # type: ignore - from ._models import LabelingJobImageProperties # type: ignore - from ._models import LabelingJobInstructions # type: ignore - from ._models import LabelingJobMediaProperties # type: ignore - from ._models import LabelingJobProperties # type: ignore - from ._models import LabelingJobResourceArmPaginatedResult # type: ignore - from ._models import LabelingJobTextProperties # type: ignore - from ._models import LakeHouseArtifact # type: ignore - from ._models import ListAmlUserFeatureResult # type: ignore - from ._models import ListNotebookKeysResult # type: ignore - from ._models import ListStorageAccountKeysResult # type: ignore - from ._models import ListUsagesResult # type: ignore - from ._models import ListWorkspaceKeysResult # type: ignore - from ._models import ListWorkspaceQuotas # type: ignore - from ._models import LiteralJobInput # type: ignore - from ._models import MLAssistConfiguration # type: ignore - from ._models import MLAssistConfigurationDisabled # type: ignore - from ._models import MLAssistConfigurationEnabled # type: ignore - from ._models import MLFlowModelJobInput # type: ignore - from ._models import MLFlowModelJobOutput # type: ignore - from ._models import MLTableData # type: ignore - from ._models import MLTableJobInput # type: ignore - from ._models import MLTableJobOutput # type: ignore - from ._models import ManagedComputeIdentity # type: ignore - from ._models import ManagedIdentity # type: ignore - from ._models import ManagedIdentityAuthTypeWorkspaceConnectionProperties # type: ignore - from ._models import ManagedNetworkProvisionOptions # type: ignore - from ._models import ManagedNetworkProvisionStatus # type: ignore - from ._models import ManagedNetworkSettings # type: ignore - from ._models import ManagedOnlineDeployment # type: ignore - from ._models import ManagedServiceIdentity # 
type: ignore - from ._models import MaterializationComputeResource # type: ignore - from ._models import MaterializationSettings # type: ignore - from ._models import MedianStoppingPolicy # type: ignore - from ._models import ModelConfiguration # type: ignore - from ._models import ModelContainer # type: ignore - from ._models import ModelContainerProperties # type: ignore - from ._models import ModelContainerResourceArmPaginatedResult # type: ignore - from ._models import ModelPackageInput # type: ignore - from ._models import ModelPerformanceMetricThresholdBase # type: ignore - from ._models import ModelPerformanceSignal # type: ignore - from ._models import ModelProfile # type: ignore - from ._models import ModelVersion # type: ignore - from ._models import ModelVersionProperties # type: ignore - from ._models import ModelVersionResourceArmPaginatedResult # type: ignore - from ._models import MonitorComputeConfigurationBase # type: ignore - from ._models import MonitorComputeIdentityBase # type: ignore - from ._models import MonitorDefinition # type: ignore - from ._models import MonitorServerlessSparkCompute # type: ignore - from ._models import MonitoringAlertNotificationSettingsBase # type: ignore - from ._models import MonitoringDataSegment # type: ignore - from ._models import MonitoringFeatureFilterBase # type: ignore - from ._models import MonitoringInputDataBase # type: ignore - from ._models import MonitoringSignalBase # type: ignore - from ._models import MonitoringTarget # type: ignore - from ._models import MonitoringThreshold # type: ignore - from ._models import MonitoringWorkspaceConnection # type: ignore - from ._models import Mpi # type: ignore - from ._models import NCrossValidations # type: ignore - from ._models import NlpFixedParameters # type: ignore - from ._models import NlpParameterSubspace # type: ignore - from ._models import NlpSweepSettings # type: ignore - from ._models import NlpVertical # type: ignore - from ._models import NlpVerticalFeaturizationSettings # type: ignore - from ._models import NlpVerticalLimitSettings # type: ignore - from ._models import NodeStateCounts # type: ignore - from ._models import Nodes # type: ignore - from ._models import NoneAuthTypeWorkspaceConnectionProperties # type: ignore - from ._models import NoneDatastoreCredentials # type: ignore - from ._models import NotebookAccessTokenResult # type: ignore - from ._models import NotebookPreparationError # type: ignore - from ._models import NotebookResourceInfo # type: ignore - from ._models import NotificationSetting # type: ignore - from ._models import NumericalDataDriftMetricThreshold # type: ignore - from ._models import NumericalDataQualityMetricThreshold # type: ignore - from ._models import NumericalPredictionDriftMetricThreshold # type: ignore - from ._models import Objective # type: ignore - from ._models import OneLakeArtifact # type: ignore - from ._models import OneLakeDatastore # type: ignore - from ._models import OnlineDeployment # type: ignore - from ._models import OnlineDeploymentProperties # type: ignore - from ._models import OnlineDeploymentTrackedResourceArmPaginatedResult # type: ignore - from ._models import OnlineEndpoint # type: ignore - from ._models import OnlineEndpointProperties # type: ignore - from ._models import OnlineEndpointTrackedResourceArmPaginatedResult # type: ignore - from ._models import OnlineInferenceConfiguration # type: ignore - from ._models import OnlineRequestSettings # type: ignore - from ._models import OnlineScaleSettings # 
type: ignore - from ._models import OperationDisplay # type: ignore - from ._models import OsPatchingStatus # type: ignore - from ._models import OutboundRule # type: ignore - from ._models import OutboundRuleBasicResource # type: ignore - from ._models import OutboundRuleListResult # type: ignore - from ._models import OutputPathAssetReference # type: ignore - from ._models import PATAuthTypeWorkspaceConnectionProperties # type: ignore - from ._models import PackageInputPathBase # type: ignore - from ._models import PackageInputPathId # type: ignore - from ._models import PackageInputPathUrl # type: ignore - from ._models import PackageInputPathVersion # type: ignore - from ._models import PackageRequest # type: ignore - from ._models import PackageResponse # type: ignore - from ._models import PaginatedComputeResourcesList # type: ignore - from ._models import PartialBatchDeployment # type: ignore - from ._models import PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties # type: ignore - from ._models import PartialJobBase # type: ignore - from ._models import PartialJobBasePartialResource # type: ignore - from ._models import PartialManagedServiceIdentity # type: ignore - from ._models import PartialMinimalTrackedResource # type: ignore - from ._models import PartialMinimalTrackedResourceWithIdentity # type: ignore - from ._models import PartialMinimalTrackedResourceWithSku # type: ignore - from ._models import PartialMinimalTrackedResourceWithSkuAndIdentity # type: ignore - from ._models import PartialNotificationSetting # type: ignore - from ._models import PartialRegistryPartialTrackedResource # type: ignore - from ._models import PartialSku # type: ignore - from ._models import Password # type: ignore - from ._models import PendingUploadCredentialDto # type: ignore - from ._models import PendingUploadRequestDto # type: ignore - from ._models import PendingUploadResponseDto # type: ignore - from ._models import PersonalComputeInstanceSettings # type: ignore - from ._models import PipelineJob # type: ignore - from ._models import PredictionDriftMetricThresholdBase # type: ignore - from ._models import PredictionDriftMonitoringSignal # type: ignore - from ._models import PrivateEndpoint # type: ignore - from ._models import PrivateEndpointConnection # type: ignore - from ._models import PrivateEndpointConnectionListResult # type: ignore - from ._models import PrivateEndpointDestination # type: ignore - from ._models import PrivateEndpointOutboundRule # type: ignore - from ._models import PrivateEndpointResource # type: ignore - from ._models import PrivateLinkResource # type: ignore - from ._models import PrivateLinkResourceListResult # type: ignore - from ._models import PrivateLinkServiceConnectionState # type: ignore - from ._models import ProbeSettings # type: ignore - from ._models import ProgressMetrics # type: ignore - from ._models import PyTorch # type: ignore - from ._models import QueueSettings # type: ignore - from ._models import QuotaBaseProperties # type: ignore - from ._models import QuotaUpdateParameters # type: ignore - from ._models import RandomSamplingAlgorithm # type: ignore - from ._models import Ray # type: ignore - from ._models import Recurrence # type: ignore - from ._models import RecurrenceSchedule # type: ignore - from ._models import RecurrenceTrigger # type: ignore - from ._models import RegenerateEndpointKeysRequest # type: ignore - from ._models import Registry # type: ignore - from ._models import RegistryListCredentialsResult # type: 
ignore - from ._models import RegistryPartialManagedServiceIdentity # type: ignore - from ._models import RegistryPrivateEndpointConnection # type: ignore - from ._models import RegistryPrivateLinkServiceConnectionState # type: ignore - from ._models import RegistryRegionArmDetails # type: ignore - from ._models import RegistryTrackedResourceArmPaginatedResult # type: ignore - from ._models import Regression # type: ignore - from ._models import RegressionModelPerformanceMetricThreshold # type: ignore - from ._models import RegressionTrainingSettings # type: ignore - from ._models import RequestLogging # type: ignore - from ._models import ResizeSchema # type: ignore - from ._models import Resource # type: ignore - from ._models import ResourceBase # type: ignore - from ._models import ResourceConfiguration # type: ignore - from ._models import ResourceId # type: ignore - from ._models import ResourceName # type: ignore - from ._models import ResourceQuota # type: ignore - from ._models import Route # type: ignore - from ._models import SASAuthTypeWorkspaceConnectionProperties # type: ignore - from ._models import SASCredentialDto # type: ignore - from ._models import SamplingAlgorithm # type: ignore - from ._models import SasDatastoreCredentials # type: ignore - from ._models import SasDatastoreSecrets # type: ignore - from ._models import ScaleSettings # type: ignore - from ._models import ScaleSettingsInformation # type: ignore - from ._models import Schedule # type: ignore - from ._models import ScheduleActionBase # type: ignore - from ._models import ScheduleBase # type: ignore - from ._models import ScheduleProperties # type: ignore - from ._models import ScheduleResourceArmPaginatedResult # type: ignore - from ._models import ScriptReference # type: ignore - from ._models import ScriptsToExecute # type: ignore - from ._models import Seasonality # type: ignore - from ._models import SecretConfiguration # type: ignore - from ._models import ServerlessEndpoint # type: ignore - from ._models import ServerlessEndpointProperties # type: ignore - from ._models import ServerlessEndpointTrackedResourceArmPaginatedResult # type: ignore - from ._models import ServiceManagedResourcesSettings # type: ignore - from ._models import ServicePrincipalAuthTypeWorkspaceConnectionProperties # type: ignore - from ._models import ServicePrincipalDatastoreCredentials # type: ignore - from ._models import ServicePrincipalDatastoreSecrets # type: ignore - from ._models import ServiceTagDestination # type: ignore - from ._models import ServiceTagOutboundRule # type: ignore - from ._models import SetupScripts # type: ignore - from ._models import SharedPrivateLinkResource # type: ignore - from ._models import Sku # type: ignore - from ._models import SkuCapacity # type: ignore - from ._models import SkuResource # type: ignore - from ._models import SkuResourceArmPaginatedResult # type: ignore - from ._models import SkuSetting # type: ignore - from ._models import SparkJob # type: ignore - from ._models import SparkJobEntry # type: ignore - from ._models import SparkJobPythonEntry # type: ignore - from ._models import SparkJobScalaEntry # type: ignore - from ._models import SparkResourceConfiguration # type: ignore - from ._models import SslConfiguration # type: ignore - from ._models import StackEnsembleSettings # type: ignore - from ._models import StaticInputData # type: ignore - from ._models import StatusMessage # type: ignore - from ._models import StorageAccountDetails # type: ignore - from ._models 
import SweepJob # type: ignore - from ._models import SweepJobLimits # type: ignore - from ._models import SynapseSpark # type: ignore - from ._models import SynapseSparkProperties # type: ignore - from ._models import SystemCreatedAcrAccount # type: ignore - from ._models import SystemCreatedStorageAccount # type: ignore - from ._models import SystemData # type: ignore - from ._models import SystemService # type: ignore - from ._models import TableFixedParameters # type: ignore - from ._models import TableParameterSubspace # type: ignore - from ._models import TableSweepSettings # type: ignore - from ._models import TableVertical # type: ignore - from ._models import TableVerticalFeaturizationSettings # type: ignore - from ._models import TableVerticalLimitSettings # type: ignore - from ._models import TargetLags # type: ignore - from ._models import TargetRollingWindowSize # type: ignore - from ._models import TargetUtilizationScaleSettings # type: ignore - from ._models import TensorFlow # type: ignore - from ._models import TextClassification # type: ignore - from ._models import TextClassificationMultilabel # type: ignore - from ._models import TextNer # type: ignore - from ._models import TmpfsOptions # type: ignore - from ._models import TopNFeaturesByAttribution # type: ignore - from ._models import TrackedResource # type: ignore - from ._models import TrailingInputData # type: ignore - from ._models import TrainingSettings # type: ignore - from ._models import TrialComponent # type: ignore - from ._models import TriggerBase # type: ignore - from ._models import TritonInferencingServer # type: ignore - from ._models import TritonModelJobInput # type: ignore - from ._models import TritonModelJobOutput # type: ignore - from ._models import TruncationSelectionPolicy # type: ignore - from ._models import UpdateWorkspaceQuotas # type: ignore - from ._models import UpdateWorkspaceQuotasResult # type: ignore - from ._models import UriFileDataVersion # type: ignore - from ._models import UriFileJobInput # type: ignore - from ._models import UriFileJobOutput # type: ignore - from ._models import UriFolderDataVersion # type: ignore - from ._models import UriFolderJobInput # type: ignore - from ._models import UriFolderJobOutput # type: ignore - from ._models import Usage # type: ignore - from ._models import UsageName # type: ignore - from ._models import UserAccountCredentials # type: ignore - from ._models import UserAssignedIdentity # type: ignore - from ._models import UserCreatedAcrAccount # type: ignore - from ._models import UserCreatedStorageAccount # type: ignore - from ._models import UserIdentity # type: ignore - from ._models import UsernamePasswordAuthTypeWorkspaceConnectionProperties # type: ignore - from ._models import VirtualMachine # type: ignore - from ._models import VirtualMachineImage # type: ignore - from ._models import VirtualMachineSchema # type: ignore - from ._models import VirtualMachineSchemaProperties # type: ignore - from ._models import VirtualMachineSecrets # type: ignore - from ._models import VirtualMachineSecretsSchema # type: ignore - from ._models import VirtualMachineSize # type: ignore - from ._models import VirtualMachineSizeListResult # type: ignore - from ._models import VirtualMachineSshCredentials # type: ignore - from ._models import VolumeDefinition # type: ignore - from ._models import VolumeOptions # type: ignore - from ._models import Webhook # type: ignore - from ._models import Workspace # type: ignore - from ._models import 
WorkspaceConnectionAccessKey # type: ignore - from ._models import WorkspaceConnectionApiKey # type: ignore - from ._models import WorkspaceConnectionManagedIdentity # type: ignore - from ._models import WorkspaceConnectionPersonalAccessToken # type: ignore - from ._models import WorkspaceConnectionPropertiesV2 # type: ignore - from ._models import WorkspaceConnectionPropertiesV2BasicResource # type: ignore - from ._models import WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult # type: ignore - from ._models import WorkspaceConnectionServicePrincipal # type: ignore - from ._models import WorkspaceConnectionSharedAccessSignature # type: ignore - from ._models import WorkspaceConnectionUpdateParameter # type: ignore - from ._models import WorkspaceConnectionUsernamePassword # type: ignore - from ._models import WorkspaceHubConfig # type: ignore - from ._models import WorkspaceListResult # type: ignore - from ._models import WorkspacePrivateEndpointResource # type: ignore - from ._models import WorkspaceUpdateParameters # type: ignore - -from ._azure_machine_learning_workspaces_enums import ( +from ._azure_machine_learning_services_enums import ( + ActionType, AllocationState, ApplicationSharingPolicy, AssetProvisioningState, @@ -1197,8 +41,8 @@ DataAvailabilityStatus, DataCollectionMode, DataImportSourceType, - DataType, DatastoreType, + DataType, DeploymentProvisioningState, DiagnoseResultLevel, DistributionType, @@ -1246,13 +90,13 @@ LogTrainingMetrics, LogValidationLoss, LogVerbosity, - MLAssistConfigurationType, - MLFlowAutologgerState, ManagedNetworkStatus, ManagedServiceIdentityType, MaterializationStoreType, MediaType, + MLAssistConfigurationType, MlflowAutologger, + MLFlowAutologgerState, ModelSize, ModelTaskType, MonitorComputeIdentityType, @@ -1282,6 +126,7 @@ OperationStatus, OperationTrigger, OrderString, + Origin, OsType, OutputDeliveryMode, PackageBuildState, @@ -1342,776 +187,1364 @@ UnitOfMeasure, UsageUnit, UseStl, - VMPriceOSType, - VMTier, ValidationMetricType, + VMPriceOSType, VmPriority, + VMTier, VolumeDefinitionType, WebhookType, WeekDay, ) +from ._models_py3 import ( + AKS, + AccessKeyAuthTypeWorkspaceConnectionProperties, + AccountKeyDatastoreCredentials, + AccountKeyDatastoreSecrets, + AcrDetails, + AksComputeSecrets, + AksComputeSecretsProperties, + AksNetworkingConfiguration, + AKSSchema, + AKSSchemaProperties, + AllFeatures, + AllNodes, + AmlCompute, + AmlComputeNodeInformation, + AmlComputeNodesInformation, + AmlComputeProperties, + AmlComputeSchema, + AmlToken, + AmlTokenComputeIdentity, + AmlUserFeature, + ApiKeyAuthWorkspaceConnectionProperties, + ArmResourceId, + AssetBase, + AssetContainer, + AssetJobInput, + AssetJobOutput, + AssetReferenceBase, + AssignedUser, + AutoDeleteSetting, + AutoForecastHorizon, + AutologgerSettings, + AutoMLJob, + AutoMLVertical, + AutoNCrossValidations, + AutoPauseProperties, + AutoScaleProperties, + AutoSeasonality, + AutoTargetLags, + AutoTargetRollingWindowSize, + AzMonMonitoringAlertNotificationSettings, + AzureBlobDatastore, + AzureDataLakeGen1Datastore, + AzureDataLakeGen2Datastore, + AzureDatastore, + AzureDevOpsWebhook, + AzureFileDatastore, + AzureMLBatchInferencingServer, + AzureMLOnlineInferencingServer, + BanditPolicy, + BaseEnvironmentId, + BaseEnvironmentSource, + BatchDeployment, + BatchDeploymentConfiguration, + BatchDeploymentProperties, + BatchDeploymentTrackedResourceArmPaginatedResult, + BatchEndpoint, + BatchEndpointDefaults, + BatchEndpointProperties, + 
BatchEndpointTrackedResourceArmPaginatedResult, + BatchPipelineComponentDeploymentConfiguration, + BatchRetrySettings, + BayesianSamplingAlgorithm, + BindOptions, + BlobReferenceForConsumptionDto, + BuildContext, + CategoricalDataDriftMetricThreshold, + CategoricalDataQualityMetricThreshold, + CategoricalPredictionDriftMetricThreshold, + CertificateDatastoreCredentials, + CertificateDatastoreSecrets, + Classification, + ClassificationModelPerformanceMetricThreshold, + ClassificationTrainingSettings, + ClusterUpdateParameters, + CocoExportSummary, + CodeConfiguration, + CodeContainer, + CodeContainerProperties, + CodeContainerResourceArmPaginatedResult, + CodeVersion, + CodeVersionProperties, + CodeVersionResourceArmPaginatedResult, + Collection, + ColumnTransformer, + CommandJob, + CommandJobLimits, + ComponentConfiguration, + ComponentContainer, + ComponentContainerProperties, + ComponentContainerResourceArmPaginatedResult, + ComponentVersion, + ComponentVersionProperties, + ComponentVersionResourceArmPaginatedResult, + Compute, + ComputeInstance, + ComputeInstanceApplication, + ComputeInstanceAutologgerSettings, + ComputeInstanceConnectivityEndpoints, + ComputeInstanceContainer, + ComputeInstanceCreatedBy, + ComputeInstanceDataDisk, + ComputeInstanceDataMount, + ComputeInstanceEnvironmentInfo, + ComputeInstanceLastOperation, + ComputeInstanceProperties, + ComputeInstanceSchema, + ComputeInstanceSshSettings, + ComputeInstanceVersion, + ComputeResource, + ComputeResourceSchema, + ComputeRuntimeDto, + ComputeSchedules, + ComputeSecrets, + ComputeStartStopSchedule, + ContainerResourceRequirements, + ContainerResourceSettings, + CosmosDbSettings, + CreateMonitorAction, + Cron, + CronTrigger, + CsvExportSummary, + CustomForecastHorizon, + CustomInferencingServer, + CustomKeys, + CustomKeysWorkspaceConnectionProperties, + CustomMetricThreshold, + CustomModelJobInput, + CustomModelJobOutput, + CustomMonitoringSignal, + CustomNCrossValidations, + CustomSeasonality, + CustomService, + CustomTargetLags, + CustomTargetRollingWindowSize, + DatabaseSource, + Databricks, + DatabricksComputeSecrets, + DatabricksComputeSecretsProperties, + DatabricksProperties, + DatabricksSchema, + DataCollector, + DataContainer, + DataContainerProperties, + DataContainerResourceArmPaginatedResult, + DataDriftMetricThresholdBase, + DataDriftMonitoringSignal, + DataFactory, + DataImport, + DataImportSource, + DataLakeAnalytics, + DataLakeAnalyticsSchema, + DataLakeAnalyticsSchemaProperties, + DataPathAssetReference, + DataQualityMetricThresholdBase, + DataQualityMonitoringSignal, + DatasetExportSummary, + Datastore, + DatastoreCredentials, + DatastoreProperties, + DatastoreResourceArmPaginatedResult, + DatastoreSecrets, + DataVersionBase, + DataVersionBaseProperties, + DataVersionBaseResourceArmPaginatedResult, + DefaultScaleSettings, + DeploymentLogs, + DeploymentLogsRequest, + DeploymentResourceConfiguration, + DiagnoseRequestProperties, + DiagnoseResponseResult, + DiagnoseResponseResultValue, + DiagnoseResult, + DiagnoseWorkspaceParameters, + DistributionConfiguration, + Docker, + EarlyTerminationPolicy, + EmailMonitoringAlertNotificationSettings, + EncryptionKeyVaultUpdateProperties, + EncryptionProperty, + EncryptionUpdateProperties, + Endpoint, + EndpointAuthKeys, + EndpointAuthToken, + EndpointDeploymentPropertiesBase, + EndpointPropertiesBase, + EndpointScheduleAction, + EnvironmentContainer, + EnvironmentContainerProperties, + EnvironmentContainerResourceArmPaginatedResult, + EnvironmentVariable, + 
EnvironmentVersion, + EnvironmentVersionProperties, + EnvironmentVersionResourceArmPaginatedResult, + ErrorAdditionalInfo, + ErrorDetail, + ErrorResponse, + EstimatedVMPrice, + EstimatedVMPrices, + ExportSummary, + ExternalFQDNResponse, + Feature, + FeatureAttributionDriftMonitoringSignal, + FeatureAttributionMetricThreshold, + FeatureProperties, + FeatureResourceArmPaginatedResult, + FeaturesetContainer, + FeaturesetContainerProperties, + FeaturesetContainerResourceArmPaginatedResult, + FeaturesetSpecification, + FeaturesetVersion, + FeaturesetVersionBackfillRequest, + FeaturesetVersionBackfillResponse, + FeaturesetVersionProperties, + FeaturesetVersionResourceArmPaginatedResult, + FeaturestoreEntityContainer, + FeaturestoreEntityContainerProperties, + FeaturestoreEntityContainerResourceArmPaginatedResult, + FeaturestoreEntityVersion, + FeaturestoreEntityVersionProperties, + FeaturestoreEntityVersionResourceArmPaginatedResult, + FeatureStoreSettings, + FeatureSubset, + FeatureWindow, + FeaturizationSettings, + FileSystemSource, + FixedInputData, + FlavorData, + ForecastHorizon, + Forecasting, + ForecastingSettings, + ForecastingTrainingSettings, + FQDNEndpoint, + FQDNEndpointDetail, + FQDNEndpoints, + FQDNEndpointsPropertyBag, + FqdnOutboundRule, + GenerationSafetyQualityMetricThreshold, + GenerationSafetyQualityMonitoringSignal, + GenerationTokenStatisticsMetricThreshold, + GenerationTokenStatisticsSignal, + GridSamplingAlgorithm, + HdfsDatastore, + HDInsight, + HDInsightProperties, + HDInsightSchema, + IdAssetReference, + IdentityConfiguration, + IdentityForCmk, + IdleShutdownSetting, + Image, + ImageClassification, + ImageClassificationBase, + ImageClassificationMultilabel, + ImageInstanceSegmentation, + ImageLimitSettings, + ImageMetadata, + ImageModelDistributionSettings, + ImageModelDistributionSettingsClassification, + ImageModelDistributionSettingsObjectDetection, + ImageModelSettings, + ImageModelSettingsClassification, + ImageModelSettingsObjectDetection, + ImageObjectDetection, + ImageObjectDetectionBase, + ImageSweepSettings, + ImageVertical, + ImportDataAction, + IndexColumn, + InferenceContainerProperties, + InferencingServer, + InstanceTypeSchema, + InstanceTypeSchemaResources, + IntellectualProperty, + JobBase, + JobBaseProperties, + JobBaseResourceArmPaginatedResult, + JobInput, + JobLimits, + JobOutput, + JobResourceConfiguration, + JobScheduleAction, + JobService, + KerberosCredentials, + KerberosKeytabCredentials, + KerberosKeytabSecrets, + KerberosPasswordCredentials, + KerberosPasswordSecrets, + KeyVaultProperties, + Kubernetes, + KubernetesOnlineDeployment, + KubernetesProperties, + KubernetesSchema, + LabelCategory, + LabelClass, + LabelingDataConfiguration, + LabelingJob, + LabelingJobImageProperties, + LabelingJobInstructions, + LabelingJobMediaProperties, + LabelingJobProperties, + LabelingJobResourceArmPaginatedResult, + LabelingJobTextProperties, + LakeHouseArtifact, + ListAmlUserFeatureResult, + ListNotebookKeysResult, + ListStorageAccountKeysResult, + ListUsagesResult, + ListWorkspaceKeysResult, + ListWorkspaceQuotas, + LiteralJobInput, + ManagedComputeIdentity, + ManagedIdentity, + ManagedIdentityAuthTypeWorkspaceConnectionProperties, + ManagedNetworkProvisionOptions, + ManagedNetworkProvisionStatus, + ManagedNetworkSettings, + ManagedOnlineDeployment, + ManagedServiceIdentity, + MaterializationComputeResource, + MaterializationSettings, + MedianStoppingPolicy, + MLAssistConfiguration, + MLAssistConfigurationDisabled, + MLAssistConfigurationEnabled, + 
MLFlowModelJobInput, + MLFlowModelJobOutput, + MLTableData, + MLTableJobInput, + MLTableJobOutput, + ModelConfiguration, + ModelContainer, + ModelContainerProperties, + ModelContainerResourceArmPaginatedResult, + ModelPackageInput, + ModelPerformanceMetricThresholdBase, + ModelPerformanceSignal, + ModelProfile, + ModelVersion, + ModelVersionProperties, + ModelVersionResourceArmPaginatedResult, + MonitorComputeConfigurationBase, + MonitorComputeIdentityBase, + MonitorDefinition, + MonitoringAlertNotificationSettingsBase, + MonitoringDataSegment, + MonitoringFeatureFilterBase, + MonitoringInputDataBase, + MonitoringSignalBase, + MonitoringTarget, + MonitoringThreshold, + MonitoringWorkspaceConnection, + MonitorServerlessSparkCompute, + Mpi, + NCrossValidations, + NlpFixedParameters, + NlpParameterSubspace, + NlpSweepSettings, + NlpVertical, + NlpVerticalFeaturizationSettings, + NlpVerticalLimitSettings, + Nodes, + NodeStateCounts, + NoneAuthTypeWorkspaceConnectionProperties, + NoneDatastoreCredentials, + NotebookAccessTokenResult, + NotebookPreparationError, + NotebookResourceInfo, + NotificationSetting, + NumericalDataDriftMetricThreshold, + NumericalDataQualityMetricThreshold, + NumericalPredictionDriftMetricThreshold, + Objective, + OneLakeArtifact, + OneLakeDatastore, + OnlineDeployment, + OnlineDeploymentProperties, + OnlineDeploymentTrackedResourceArmPaginatedResult, + OnlineEndpoint, + OnlineEndpointProperties, + OnlineEndpointTrackedResourceArmPaginatedResult, + OnlineInferenceConfiguration, + OnlineRequestSettings, + OnlineScaleSettings, + Operation, + OperationDisplay, + OperationListResult, + OsPatchingStatus, + OutboundRule, + OutboundRuleBasicResource, + OutboundRuleListResult, + OutputPathAssetReference, + PackageInputPathBase, + PackageInputPathId, + PackageInputPathUrl, + PackageInputPathVersion, + PackageRequest, + PackageResponse, + PaginatedComputeResourcesList, + PartialBatchDeployment, + PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, + PartialJobBase, + PartialJobBasePartialResource, + PartialManagedServiceIdentity, + PartialMinimalTrackedResource, + PartialMinimalTrackedResourceWithIdentity, + PartialMinimalTrackedResourceWithSku, + PartialMinimalTrackedResourceWithSkuAndIdentity, + PartialNotificationSetting, + PartialRegistryPartialTrackedResource, + PartialSku, + Password, + PATAuthTypeWorkspaceConnectionProperties, + PendingUploadCredentialDto, + PendingUploadRequestDto, + PendingUploadResponseDto, + PersonalComputeInstanceSettings, + PipelineJob, + PredictionDriftMetricThresholdBase, + PredictionDriftMonitoringSignal, + PrivateEndpoint, + PrivateEndpointConnection, + PrivateEndpointConnectionListResult, + PrivateEndpointDestination, + PrivateEndpointOutboundRule, + PrivateEndpointResource, + PrivateLinkResource, + PrivateLinkResourceListResult, + PrivateLinkServiceConnectionState, + ProbeSettings, + ProgressMetrics, + PyTorch, + QueueSettings, + QuotaBaseProperties, + QuotaUpdateParameters, + RandomSamplingAlgorithm, + Ray, + Recurrence, + RecurrenceSchedule, + RecurrenceTrigger, + RegenerateEndpointKeysRequest, + Registry, + RegistryListCredentialsResult, + RegistryPartialManagedServiceIdentity, + RegistryPrivateEndpointConnection, + RegistryPrivateLinkServiceConnectionState, + RegistryRegionArmDetails, + RegistryTrackedResourceArmPaginatedResult, + Regression, + RegressionModelPerformanceMetricThreshold, + RegressionTrainingSettings, + RequestLogging, + ResizeSchema, + Resource, + ResourceBase, + ResourceConfiguration, + ResourceId, + 
ResourceName, + ResourceQuota, + Route, + SamplingAlgorithm, + SASAuthTypeWorkspaceConnectionProperties, + SASCredentialDto, + SasDatastoreCredentials, + SasDatastoreSecrets, + ScaleSettings, + ScaleSettingsInformation, + Schedule, + ScheduleActionBase, + ScheduleBase, + ScheduleProperties, + ScheduleResourceArmPaginatedResult, + ScriptReference, + ScriptsToExecute, + Seasonality, + SecretConfiguration, + ServerlessComputeSettings, + ServerlessEndpoint, + ServerlessEndpointProperties, + ServerlessEndpointTrackedResourceArmPaginatedResult, + ServiceManagedResourcesSettings, + ServicePrincipalAuthTypeWorkspaceConnectionProperties, + ServicePrincipalDatastoreCredentials, + ServicePrincipalDatastoreSecrets, + ServiceTagDestination, + ServiceTagOutboundRule, + SetupScripts, + SharedPrivateLinkResource, + Sku, + SkuCapacity, + SkuResource, + SkuResourceArmPaginatedResult, + SkuSetting, + SparkJob, + SparkJobEntry, + SparkJobPythonEntry, + SparkJobScalaEntry, + SparkResourceConfiguration, + SslConfiguration, + StackEnsembleSettings, + StaticInputData, + StatusMessage, + StorageAccountDetails, + SweepJob, + SweepJobLimits, + SynapseSpark, + SynapseSparkProperties, + SystemCreatedAcrAccount, + SystemCreatedStorageAccount, + SystemData, + SystemService, + TableFixedParameters, + TableParameterSubspace, + TableSweepSettings, + TableVertical, + TableVerticalFeaturizationSettings, + TableVerticalLimitSettings, + TargetLags, + TargetRollingWindowSize, + TargetUtilizationScaleSettings, + TensorFlow, + TextClassification, + TextClassificationMultilabel, + TextNer, + TmpfsOptions, + TopNFeaturesByAttribution, + TrackedResource, + TrailingInputData, + TrainingSettings, + TrialComponent, + TriggerBase, + TritonInferencingServer, + TritonModelJobInput, + TritonModelJobOutput, + TruncationSelectionPolicy, + UpdateWorkspaceQuotas, + UpdateWorkspaceQuotasResult, + UriFileDataVersion, + UriFileJobInput, + UriFileJobOutput, + UriFolderDataVersion, + UriFolderJobInput, + UriFolderJobOutput, + Usage, + UsageName, + UserAccountCredentials, + UserAssignedIdentity, + UserCreatedAcrAccount, + UserCreatedStorageAccount, + UserIdentity, + UsernamePasswordAuthTypeWorkspaceConnectionProperties, + VirtualMachine, + VirtualMachineImage, + VirtualMachineSchema, + VirtualMachineSchemaProperties, + VirtualMachineSecrets, + VirtualMachineSecretsSchema, + VirtualMachineSize, + VirtualMachineSizeListResult, + VirtualMachineSshCredentials, + VolumeDefinition, + VolumeOptions, + Webhook, + Workspace, + WorkspaceConnectionAccessKey, + WorkspaceConnectionApiKey, + WorkspaceConnectionManagedIdentity, + WorkspaceConnectionPersonalAccessToken, + WorkspaceConnectionPropertiesV2, + WorkspaceConnectionPropertiesV2BasicResource, + WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult, + WorkspaceConnectionServicePrincipal, + WorkspaceConnectionSharedAccessSignature, + WorkspaceConnectionUpdateParameter, + WorkspaceConnectionUsernamePassword, + WorkspaceHubConfig, + WorkspaceListResult, + WorkspacePrivateEndpointResource, + WorkspaceUpdateParameters, +) +from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import __all__ as _patch_all +from ._patch import patch_sdk as _patch_sdk __all__ = [ - 'AKS', - 'AKSSchema', - 'AKSSchemaProperties', - 'AccessKeyAuthTypeWorkspaceConnectionProperties', - 'AccountKeyDatastoreCredentials', - 'AccountKeyDatastoreSecrets', - 'AcrDetails', - 'AksComputeSecrets', - 'AksComputeSecretsProperties', - 'AksNetworkingConfiguration', - 'AllFeatures', - 'AllNodes', - 'AmlCompute', - 
'AmlComputeNodeInformation', - 'AmlComputeNodesInformation', - 'AmlComputeProperties', - 'AmlComputeSchema', - 'AmlOperation', - 'AmlOperationListResult', - 'AmlToken', - 'AmlTokenComputeIdentity', - 'AmlUserFeature', - 'ApiKeyAuthWorkspaceConnectionProperties', - 'ArmResourceId', - 'AssetBase', - 'AssetContainer', - 'AssetJobInput', - 'AssetJobOutput', - 'AssetReferenceBase', - 'AssignedUser', - 'AutoDeleteSetting', - 'AutoForecastHorizon', - 'AutoMLJob', - 'AutoMLVertical', - 'AutoNCrossValidations', - 'AutoPauseProperties', - 'AutoScaleProperties', - 'AutoSeasonality', - 'AutoTargetLags', - 'AutoTargetRollingWindowSize', - 'AutologgerSettings', - 'AzMonMonitoringAlertNotificationSettings', - 'AzureBlobDatastore', - 'AzureDataLakeGen1Datastore', - 'AzureDataLakeGen2Datastore', - 'AzureDatastore', - 'AzureDevOpsWebhook', - 'AzureFileDatastore', - 'AzureMLBatchInferencingServer', - 'AzureMLOnlineInferencingServer', - 'BanditPolicy', - 'BaseEnvironmentId', - 'BaseEnvironmentSource', - 'BatchDeployment', - 'BatchDeploymentConfiguration', - 'BatchDeploymentProperties', - 'BatchDeploymentTrackedResourceArmPaginatedResult', - 'BatchEndpoint', - 'BatchEndpointDefaults', - 'BatchEndpointProperties', - 'BatchEndpointTrackedResourceArmPaginatedResult', - 'BatchPipelineComponentDeploymentConfiguration', - 'BatchRetrySettings', - 'BayesianSamplingAlgorithm', - 'BindOptions', - 'BlobReferenceForConsumptionDto', - 'BuildContext', - 'CategoricalDataDriftMetricThreshold', - 'CategoricalDataQualityMetricThreshold', - 'CategoricalPredictionDriftMetricThreshold', - 'CertificateDatastoreCredentials', - 'CertificateDatastoreSecrets', - 'Classification', - 'ClassificationModelPerformanceMetricThreshold', - 'ClassificationTrainingSettings', - 'ClusterUpdateParameters', - 'CocoExportSummary', - 'CodeConfiguration', - 'CodeContainer', - 'CodeContainerProperties', - 'CodeContainerResourceArmPaginatedResult', - 'CodeVersion', - 'CodeVersionProperties', - 'CodeVersionResourceArmPaginatedResult', - 'Collection', - 'ColumnTransformer', - 'CommandJob', - 'CommandJobLimits', - 'ComponentConfiguration', - 'ComponentContainer', - 'ComponentContainerProperties', - 'ComponentContainerResourceArmPaginatedResult', - 'ComponentVersion', - 'ComponentVersionProperties', - 'ComponentVersionResourceArmPaginatedResult', - 'Compute', - 'ComputeInstance', - 'ComputeInstanceApplication', - 'ComputeInstanceAutologgerSettings', - 'ComputeInstanceConnectivityEndpoints', - 'ComputeInstanceContainer', - 'ComputeInstanceCreatedBy', - 'ComputeInstanceDataDisk', - 'ComputeInstanceDataMount', - 'ComputeInstanceEnvironmentInfo', - 'ComputeInstanceLastOperation', - 'ComputeInstanceProperties', - 'ComputeInstanceSchema', - 'ComputeInstanceSshSettings', - 'ComputeInstanceVersion', - 'ComputeResource', - 'ComputeResourceSchema', - 'ComputeRuntimeDto', - 'ComputeSchedules', - 'ComputeSecrets', - 'ComputeStartStopSchedule', - 'ContainerResourceRequirements', - 'ContainerResourceSettings', - 'CosmosDbSettings', - 'CreateMonitorAction', - 'Cron', - 'CronTrigger', - 'CsvExportSummary', - 'CustomForecastHorizon', - 'CustomInferencingServer', - 'CustomKeys', - 'CustomKeysWorkspaceConnectionProperties', - 'CustomMetricThreshold', - 'CustomModelJobInput', - 'CustomModelJobOutput', - 'CustomMonitoringSignal', - 'CustomNCrossValidations', - 'CustomSeasonality', - 'CustomService', - 'CustomTargetLags', - 'CustomTargetRollingWindowSize', - 'DataCollector', - 'DataContainer', - 'DataContainerProperties', - 'DataContainerResourceArmPaginatedResult', - 
'DataDriftMetricThresholdBase', - 'DataDriftMonitoringSignal', - 'DataFactory', - 'DataImport', - 'DataImportSource', - 'DataLakeAnalytics', - 'DataLakeAnalyticsSchema', - 'DataLakeAnalyticsSchemaProperties', - 'DataPathAssetReference', - 'DataQualityMetricThresholdBase', - 'DataQualityMonitoringSignal', - 'DataVersionBase', - 'DataVersionBaseProperties', - 'DataVersionBaseResourceArmPaginatedResult', - 'DatabaseSource', - 'Databricks', - 'DatabricksComputeSecrets', - 'DatabricksComputeSecretsProperties', - 'DatabricksProperties', - 'DatabricksSchema', - 'DatasetExportSummary', - 'Datastore', - 'DatastoreCredentials', - 'DatastoreProperties', - 'DatastoreResourceArmPaginatedResult', - 'DatastoreSecrets', - 'DefaultScaleSettings', - 'DeploymentLogs', - 'DeploymentLogsRequest', - 'DeploymentResourceConfiguration', - 'DiagnoseRequestProperties', - 'DiagnoseResponseResult', - 'DiagnoseResponseResultValue', - 'DiagnoseResult', - 'DiagnoseWorkspaceParameters', - 'DistributionConfiguration', - 'Docker', - 'EarlyTerminationPolicy', - 'EmailMonitoringAlertNotificationSettings', - 'EncryptionKeyVaultUpdateProperties', - 'EncryptionProperty', - 'EncryptionUpdateProperties', - 'Endpoint', - 'EndpointAuthKeys', - 'EndpointAuthToken', - 'EndpointDeploymentPropertiesBase', - 'EndpointPropertiesBase', - 'EndpointScheduleAction', - 'EnvironmentContainer', - 'EnvironmentContainerProperties', - 'EnvironmentContainerResourceArmPaginatedResult', - 'EnvironmentVariable', - 'EnvironmentVersion', - 'EnvironmentVersionProperties', - 'EnvironmentVersionResourceArmPaginatedResult', - 'ErrorAdditionalInfo', - 'ErrorDetail', - 'ErrorResponse', - 'EstimatedVMPrice', - 'EstimatedVMPrices', - 'ExportSummary', - 'ExternalFQDNResponse', - 'FQDNEndpoint', - 'FQDNEndpointDetail', - 'FQDNEndpoints', - 'FQDNEndpointsPropertyBag', - 'Feature', - 'FeatureAttributionDriftMonitoringSignal', - 'FeatureAttributionMetricThreshold', - 'FeatureProperties', - 'FeatureResourceArmPaginatedResult', - 'FeatureStoreSettings', - 'FeatureSubset', - 'FeatureWindow', - 'FeaturesetContainer', - 'FeaturesetContainerProperties', - 'FeaturesetContainerResourceArmPaginatedResult', - 'FeaturesetSpecification', - 'FeaturesetVersion', - 'FeaturesetVersionBackfillRequest', - 'FeaturesetVersionBackfillResponse', - 'FeaturesetVersionProperties', - 'FeaturesetVersionResourceArmPaginatedResult', - 'FeaturestoreEntityContainer', - 'FeaturestoreEntityContainerProperties', - 'FeaturestoreEntityContainerResourceArmPaginatedResult', - 'FeaturestoreEntityVersion', - 'FeaturestoreEntityVersionProperties', - 'FeaturestoreEntityVersionResourceArmPaginatedResult', - 'FeaturizationSettings', - 'FileSystemSource', - 'FixedInputData', - 'FlavorData', - 'ForecastHorizon', - 'Forecasting', - 'ForecastingSettings', - 'ForecastingTrainingSettings', - 'FqdnOutboundRule', - 'GenerationSafetyQualityMetricThreshold', - 'GenerationSafetyQualityMonitoringSignal', - 'GenerationTokenStatisticsMetricThreshold', - 'GenerationTokenStatisticsSignal', - 'GridSamplingAlgorithm', - 'HDInsight', - 'HDInsightProperties', - 'HDInsightSchema', - 'HdfsDatastore', - 'IdAssetReference', - 'IdentityConfiguration', - 'IdentityForCmk', - 'IdleShutdownSetting', - 'Image', - 'ImageClassification', - 'ImageClassificationBase', - 'ImageClassificationMultilabel', - 'ImageInstanceSegmentation', - 'ImageLimitSettings', - 'ImageMetadata', - 'ImageModelDistributionSettings', - 'ImageModelDistributionSettingsClassification', - 'ImageModelDistributionSettingsObjectDetection', - 'ImageModelSettings', - 
'ImageModelSettingsClassification', - 'ImageModelSettingsObjectDetection', - 'ImageObjectDetection', - 'ImageObjectDetectionBase', - 'ImageSweepSettings', - 'ImageVertical', - 'ImportDataAction', - 'IndexColumn', - 'InferenceContainerProperties', - 'InferencingServer', - 'InstanceTypeSchema', - 'InstanceTypeSchemaResources', - 'IntellectualProperty', - 'JobBase', - 'JobBaseProperties', - 'JobBaseResourceArmPaginatedResult', - 'JobInput', - 'JobLimits', - 'JobOutput', - 'JobResourceConfiguration', - 'JobScheduleAction', - 'JobService', - 'KerberosCredentials', - 'KerberosKeytabCredentials', - 'KerberosKeytabSecrets', - 'KerberosPasswordCredentials', - 'KerberosPasswordSecrets', - 'KeyVaultProperties', - 'Kubernetes', - 'KubernetesOnlineDeployment', - 'KubernetesProperties', - 'KubernetesSchema', - 'LabelCategory', - 'LabelClass', - 'LabelingDataConfiguration', - 'LabelingJob', - 'LabelingJobImageProperties', - 'LabelingJobInstructions', - 'LabelingJobMediaProperties', - 'LabelingJobProperties', - 'LabelingJobResourceArmPaginatedResult', - 'LabelingJobTextProperties', - 'LakeHouseArtifact', - 'ListAmlUserFeatureResult', - 'ListNotebookKeysResult', - 'ListStorageAccountKeysResult', - 'ListUsagesResult', - 'ListWorkspaceKeysResult', - 'ListWorkspaceQuotas', - 'LiteralJobInput', - 'MLAssistConfiguration', - 'MLAssistConfigurationDisabled', - 'MLAssistConfigurationEnabled', - 'MLFlowModelJobInput', - 'MLFlowModelJobOutput', - 'MLTableData', - 'MLTableJobInput', - 'MLTableJobOutput', - 'ManagedComputeIdentity', - 'ManagedIdentity', - 'ManagedIdentityAuthTypeWorkspaceConnectionProperties', - 'ManagedNetworkProvisionOptions', - 'ManagedNetworkProvisionStatus', - 'ManagedNetworkSettings', - 'ManagedOnlineDeployment', - 'ManagedServiceIdentity', - 'MaterializationComputeResource', - 'MaterializationSettings', - 'MedianStoppingPolicy', - 'ModelConfiguration', - 'ModelContainer', - 'ModelContainerProperties', - 'ModelContainerResourceArmPaginatedResult', - 'ModelPackageInput', - 'ModelPerformanceMetricThresholdBase', - 'ModelPerformanceSignal', - 'ModelProfile', - 'ModelVersion', - 'ModelVersionProperties', - 'ModelVersionResourceArmPaginatedResult', - 'MonitorComputeConfigurationBase', - 'MonitorComputeIdentityBase', - 'MonitorDefinition', - 'MonitorServerlessSparkCompute', - 'MonitoringAlertNotificationSettingsBase', - 'MonitoringDataSegment', - 'MonitoringFeatureFilterBase', - 'MonitoringInputDataBase', - 'MonitoringSignalBase', - 'MonitoringTarget', - 'MonitoringThreshold', - 'MonitoringWorkspaceConnection', - 'Mpi', - 'NCrossValidations', - 'NlpFixedParameters', - 'NlpParameterSubspace', - 'NlpSweepSettings', - 'NlpVertical', - 'NlpVerticalFeaturizationSettings', - 'NlpVerticalLimitSettings', - 'NodeStateCounts', - 'Nodes', - 'NoneAuthTypeWorkspaceConnectionProperties', - 'NoneDatastoreCredentials', - 'NotebookAccessTokenResult', - 'NotebookPreparationError', - 'NotebookResourceInfo', - 'NotificationSetting', - 'NumericalDataDriftMetricThreshold', - 'NumericalDataQualityMetricThreshold', - 'NumericalPredictionDriftMetricThreshold', - 'Objective', - 'OneLakeArtifact', - 'OneLakeDatastore', - 'OnlineDeployment', - 'OnlineDeploymentProperties', - 'OnlineDeploymentTrackedResourceArmPaginatedResult', - 'OnlineEndpoint', - 'OnlineEndpointProperties', - 'OnlineEndpointTrackedResourceArmPaginatedResult', - 'OnlineInferenceConfiguration', - 'OnlineRequestSettings', - 'OnlineScaleSettings', - 'OperationDisplay', - 'OsPatchingStatus', - 'OutboundRule', - 'OutboundRuleBasicResource', - 
'OutboundRuleListResult', - 'OutputPathAssetReference', - 'PATAuthTypeWorkspaceConnectionProperties', - 'PackageInputPathBase', - 'PackageInputPathId', - 'PackageInputPathUrl', - 'PackageInputPathVersion', - 'PackageRequest', - 'PackageResponse', - 'PaginatedComputeResourcesList', - 'PartialBatchDeployment', - 'PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties', - 'PartialJobBase', - 'PartialJobBasePartialResource', - 'PartialManagedServiceIdentity', - 'PartialMinimalTrackedResource', - 'PartialMinimalTrackedResourceWithIdentity', - 'PartialMinimalTrackedResourceWithSku', - 'PartialMinimalTrackedResourceWithSkuAndIdentity', - 'PartialNotificationSetting', - 'PartialRegistryPartialTrackedResource', - 'PartialSku', - 'Password', - 'PendingUploadCredentialDto', - 'PendingUploadRequestDto', - 'PendingUploadResponseDto', - 'PersonalComputeInstanceSettings', - 'PipelineJob', - 'PredictionDriftMetricThresholdBase', - 'PredictionDriftMonitoringSignal', - 'PrivateEndpoint', - 'PrivateEndpointConnection', - 'PrivateEndpointConnectionListResult', - 'PrivateEndpointDestination', - 'PrivateEndpointOutboundRule', - 'PrivateEndpointResource', - 'PrivateLinkResource', - 'PrivateLinkResourceListResult', - 'PrivateLinkServiceConnectionState', - 'ProbeSettings', - 'ProgressMetrics', - 'PyTorch', - 'QueueSettings', - 'QuotaBaseProperties', - 'QuotaUpdateParameters', - 'RandomSamplingAlgorithm', - 'Ray', - 'Recurrence', - 'RecurrenceSchedule', - 'RecurrenceTrigger', - 'RegenerateEndpointKeysRequest', - 'Registry', - 'RegistryListCredentialsResult', - 'RegistryPartialManagedServiceIdentity', - 'RegistryPrivateEndpointConnection', - 'RegistryPrivateLinkServiceConnectionState', - 'RegistryRegionArmDetails', - 'RegistryTrackedResourceArmPaginatedResult', - 'Regression', - 'RegressionModelPerformanceMetricThreshold', - 'RegressionTrainingSettings', - 'RequestLogging', - 'ResizeSchema', - 'Resource', - 'ResourceBase', - 'ResourceConfiguration', - 'ResourceId', - 'ResourceName', - 'ResourceQuota', - 'Route', - 'SASAuthTypeWorkspaceConnectionProperties', - 'SASCredentialDto', - 'SamplingAlgorithm', - 'SasDatastoreCredentials', - 'SasDatastoreSecrets', - 'ScaleSettings', - 'ScaleSettingsInformation', - 'Schedule', - 'ScheduleActionBase', - 'ScheduleBase', - 'ScheduleProperties', - 'ScheduleResourceArmPaginatedResult', - 'ScriptReference', - 'ScriptsToExecute', - 'Seasonality', - 'SecretConfiguration', - 'ServerlessEndpoint', - 'ServerlessEndpointProperties', - 'ServerlessEndpointTrackedResourceArmPaginatedResult', - 'ServiceManagedResourcesSettings', - 'ServicePrincipalAuthTypeWorkspaceConnectionProperties', - 'ServicePrincipalDatastoreCredentials', - 'ServicePrincipalDatastoreSecrets', - 'ServiceTagDestination', - 'ServiceTagOutboundRule', - 'SetupScripts', - 'SharedPrivateLinkResource', - 'Sku', - 'SkuCapacity', - 'SkuResource', - 'SkuResourceArmPaginatedResult', - 'SkuSetting', - 'SparkJob', - 'SparkJobEntry', - 'SparkJobPythonEntry', - 'SparkJobScalaEntry', - 'SparkResourceConfiguration', - 'SslConfiguration', - 'StackEnsembleSettings', - 'StaticInputData', - 'StatusMessage', - 'StorageAccountDetails', - 'SweepJob', - 'SweepJobLimits', - 'SynapseSpark', - 'SynapseSparkProperties', - 'SystemCreatedAcrAccount', - 'SystemCreatedStorageAccount', - 'SystemData', - 'SystemService', - 'TableFixedParameters', - 'TableParameterSubspace', - 'TableSweepSettings', - 'TableVertical', - 'TableVerticalFeaturizationSettings', - 'TableVerticalLimitSettings', - 'TargetLags', - 'TargetRollingWindowSize', - 
'TargetUtilizationScaleSettings', - 'TensorFlow', - 'TextClassification', - 'TextClassificationMultilabel', - 'TextNer', - 'TmpfsOptions', - 'TopNFeaturesByAttribution', - 'TrackedResource', - 'TrailingInputData', - 'TrainingSettings', - 'TrialComponent', - 'TriggerBase', - 'TritonInferencingServer', - 'TritonModelJobInput', - 'TritonModelJobOutput', - 'TruncationSelectionPolicy', - 'UpdateWorkspaceQuotas', - 'UpdateWorkspaceQuotasResult', - 'UriFileDataVersion', - 'UriFileJobInput', - 'UriFileJobOutput', - 'UriFolderDataVersion', - 'UriFolderJobInput', - 'UriFolderJobOutput', - 'Usage', - 'UsageName', - 'UserAccountCredentials', - 'UserAssignedIdentity', - 'UserCreatedAcrAccount', - 'UserCreatedStorageAccount', - 'UserIdentity', - 'UsernamePasswordAuthTypeWorkspaceConnectionProperties', - 'VirtualMachine', - 'VirtualMachineImage', - 'VirtualMachineSchema', - 'VirtualMachineSchemaProperties', - 'VirtualMachineSecrets', - 'VirtualMachineSecretsSchema', - 'VirtualMachineSize', - 'VirtualMachineSizeListResult', - 'VirtualMachineSshCredentials', - 'VolumeDefinition', - 'VolumeOptions', - 'Webhook', - 'Workspace', - 'WorkspaceConnectionAccessKey', - 'WorkspaceConnectionApiKey', - 'WorkspaceConnectionManagedIdentity', - 'WorkspaceConnectionPersonalAccessToken', - 'WorkspaceConnectionPropertiesV2', - 'WorkspaceConnectionPropertiesV2BasicResource', - 'WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult', - 'WorkspaceConnectionServicePrincipal', - 'WorkspaceConnectionSharedAccessSignature', - 'WorkspaceConnectionUpdateParameter', - 'WorkspaceConnectionUsernamePassword', - 'WorkspaceHubConfig', - 'WorkspaceListResult', - 'WorkspacePrivateEndpointResource', - 'WorkspaceUpdateParameters', - 'AllocationState', - 'ApplicationSharingPolicy', - 'AssetProvisioningState', - 'AutoDeleteCondition', - 'AutoRebuildSetting', - 'Autosave', - 'BaseEnvironmentSourceType', - 'BatchDeploymentConfigurationType', - 'BatchLoggingLevel', - 'BatchOutputAction', - 'BillingCurrency', - 'BlockedTransformers', - 'Caching', - 'CategoricalDataDriftMetric', - 'CategoricalDataQualityMetric', - 'CategoricalPredictionDriftMetric', - 'ClassificationModelPerformanceMetric', - 'ClassificationModels', - 'ClassificationMultilabelPrimaryMetrics', - 'ClassificationPrimaryMetrics', - 'ClusterPurpose', - 'ComputeInstanceAuthorizationType', - 'ComputeInstanceState', - 'ComputePowerAction', - 'ComputeType', - 'ConnectionAuthType', - 'ConnectionCategory', - 'ContainerType', - 'CreatedByType', - 'CredentialsType', - 'DataAvailabilityStatus', - 'DataCollectionMode', - 'DataImportSourceType', - 'DataType', - 'DatastoreType', - 'DeploymentProvisioningState', - 'DiagnoseResultLevel', - 'DistributionType', - 'EarlyTerminationPolicyType', - 'EgressPublicNetworkAccessType', - 'EmailNotificationEnableType', - 'EncryptionStatus', - 'EndpointAuthMode', - 'EndpointComputeType', - 'EndpointProvisioningState', - 'EndpointServiceConnectionStatus', - 'EnvironmentType', - 'EnvironmentVariableType', - 'ExportFormatType', - 'FeatureAttributionMetric', - 'FeatureDataType', - 'FeatureLags', - 'FeaturizationMode', - 'ForecastHorizonMode', - 'ForecastingModels', - 'ForecastingPrimaryMetrics', - 'GenerationSafetyQualityMetric', - 'GenerationTokenStatisticsMetric', - 'Goal', - 'IdentityConfigurationType', - 'ImageAnnotationType', - 'ImageType', - 'IncrementalDataRefresh', - 'InferencingServerType', - 'InputDeliveryMode', - 'InputPathType', - 'InstanceSegmentationPrimaryMetrics', - 'IsolationMode', - 'JobInputType', - 'JobLimitsType', - 'JobOutputType', - 
'JobProvisioningState', - 'JobStatus', - 'JobTier', - 'JobType', - 'KeyType', - 'LearningRateScheduler', - 'ListViewType', - 'LoadBalancerType', - 'LogTrainingMetrics', - 'LogValidationLoss', - 'LogVerbosity', - 'MLAssistConfigurationType', - 'MLFlowAutologgerState', - 'ManagedNetworkStatus', - 'ManagedServiceIdentityType', - 'MaterializationStoreType', - 'MediaType', - 'MlflowAutologger', - 'ModelSize', - 'ModelTaskType', - 'MonitorComputeIdentityType', - 'MonitorComputeType', - 'MonitoringAlertNotificationType', - 'MonitoringFeatureDataType', - 'MonitoringFeatureFilterType', - 'MonitoringInputDataType', - 'MonitoringModelType', - 'MonitoringNotificationMode', - 'MonitoringSignalType', - 'MountAction', - 'MountState', - 'MultiSelect', - 'NCrossValidationsMode', - 'Network', - 'NlpLearningRateScheduler', - 'NodeState', - 'NodesValueType', - 'NumericalDataDriftMetric', - 'NumericalDataQualityMetric', - 'NumericalPredictionDriftMetric', - 'ObjectDetectionPrimaryMetrics', - 'OneLakeArtifactType', - 'OperatingSystemType', - 'OperationName', - 'OperationStatus', - 'OperationTrigger', - 'OrderString', - 'OsType', - 'OutputDeliveryMode', - 'PackageBuildState', - 'PackageInputDeliveryMode', - 'PackageInputType', - 'PatchStatus', - 'PendingUploadCredentialType', - 'PendingUploadType', - 'PrivateEndpointConnectionProvisioningState', - 'ProtectionLevel', - 'Protocol', - 'ProvisioningState', - 'ProvisioningStatus', - 'PublicNetworkAccessType', - 'QuotaUnit', - 'RandomSamplingAlgorithmRule', - 'RecurrenceFrequency', - 'ReferenceType', - 'RegressionModelPerformanceMetric', - 'RegressionModels', - 'RegressionPrimaryMetrics', - 'RemoteLoginPortPublicAccess', - 'RollingRateType', - 'RuleAction', - 'RuleCategory', - 'RuleStatus', - 'RuleType', - 'SamplingAlgorithmType', - 'ScaleType', - 'ScheduleActionType', - 'ScheduleListViewType', - 'ScheduleProvisioningState', - 'ScheduleProvisioningStatus', - 'ScheduleStatus', - 'SeasonalityMode', - 'SecretsType', - 'ServiceDataAccessAuthIdentity', - 'ShortSeriesHandlingConfiguration', - 'SkuScaleType', - 'SkuTier', - 'SourceType', - 'SparkJobEntryType', - 'SshPublicAccess', - 'SslConfigStatus', - 'StackMetaLearnerType', - 'Status', - 'StatusMessageLevel', - 'StochasticOptimizer', - 'StorageAccountType', - 'TargetAggregationFunction', - 'TargetLagsMode', - 'TargetRollingWindowSizeMode', - 'TaskType', - 'TextAnnotationType', - 'TrainingMode', - 'TriggerType', - 'UnderlyingResourceAction', - 'UnitOfMeasure', - 'UsageUnit', - 'UseStl', - 'VMPriceOSType', - 'VMTier', - 'ValidationMetricType', - 'VmPriority', - 'VolumeDefinitionType', - 'WebhookType', - 'WeekDay', + "AKS", + "AKSSchema", + "AKSSchemaProperties", + "AccessKeyAuthTypeWorkspaceConnectionProperties", + "AccountKeyDatastoreCredentials", + "AccountKeyDatastoreSecrets", + "AcrDetails", + "AksComputeSecrets", + "AksComputeSecretsProperties", + "AksNetworkingConfiguration", + "AllFeatures", + "AllNodes", + "AmlCompute", + "AmlComputeNodeInformation", + "AmlComputeNodesInformation", + "AmlComputeProperties", + "AmlComputeSchema", + "AmlToken", + "AmlTokenComputeIdentity", + "AmlUserFeature", + "ApiKeyAuthWorkspaceConnectionProperties", + "ArmResourceId", + "AssetBase", + "AssetContainer", + "AssetJobInput", + "AssetJobOutput", + "AssetReferenceBase", + "AssignedUser", + "AutoDeleteSetting", + "AutoForecastHorizon", + "AutoMLJob", + "AutoMLVertical", + "AutoNCrossValidations", + "AutoPauseProperties", + "AutoScaleProperties", + "AutoSeasonality", + "AutoTargetLags", + "AutoTargetRollingWindowSize", + 
"AutologgerSettings", + "AzMonMonitoringAlertNotificationSettings", + "AzureBlobDatastore", + "AzureDataLakeGen1Datastore", + "AzureDataLakeGen2Datastore", + "AzureDatastore", + "AzureDevOpsWebhook", + "AzureFileDatastore", + "AzureMLBatchInferencingServer", + "AzureMLOnlineInferencingServer", + "BanditPolicy", + "BaseEnvironmentId", + "BaseEnvironmentSource", + "BatchDeployment", + "BatchDeploymentConfiguration", + "BatchDeploymentProperties", + "BatchDeploymentTrackedResourceArmPaginatedResult", + "BatchEndpoint", + "BatchEndpointDefaults", + "BatchEndpointProperties", + "BatchEndpointTrackedResourceArmPaginatedResult", + "BatchPipelineComponentDeploymentConfiguration", + "BatchRetrySettings", + "BayesianSamplingAlgorithm", + "BindOptions", + "BlobReferenceForConsumptionDto", + "BuildContext", + "CategoricalDataDriftMetricThreshold", + "CategoricalDataQualityMetricThreshold", + "CategoricalPredictionDriftMetricThreshold", + "CertificateDatastoreCredentials", + "CertificateDatastoreSecrets", + "Classification", + "ClassificationModelPerformanceMetricThreshold", + "ClassificationTrainingSettings", + "ClusterUpdateParameters", + "CocoExportSummary", + "CodeConfiguration", + "CodeContainer", + "CodeContainerProperties", + "CodeContainerResourceArmPaginatedResult", + "CodeVersion", + "CodeVersionProperties", + "CodeVersionResourceArmPaginatedResult", + "Collection", + "ColumnTransformer", + "CommandJob", + "CommandJobLimits", + "ComponentConfiguration", + "ComponentContainer", + "ComponentContainerProperties", + "ComponentContainerResourceArmPaginatedResult", + "ComponentVersion", + "ComponentVersionProperties", + "ComponentVersionResourceArmPaginatedResult", + "Compute", + "ComputeInstance", + "ComputeInstanceApplication", + "ComputeInstanceAutologgerSettings", + "ComputeInstanceConnectivityEndpoints", + "ComputeInstanceContainer", + "ComputeInstanceCreatedBy", + "ComputeInstanceDataDisk", + "ComputeInstanceDataMount", + "ComputeInstanceEnvironmentInfo", + "ComputeInstanceLastOperation", + "ComputeInstanceProperties", + "ComputeInstanceSchema", + "ComputeInstanceSshSettings", + "ComputeInstanceVersion", + "ComputeResource", + "ComputeResourceSchema", + "ComputeRuntimeDto", + "ComputeSchedules", + "ComputeSecrets", + "ComputeStartStopSchedule", + "ContainerResourceRequirements", + "ContainerResourceSettings", + "CosmosDbSettings", + "CreateMonitorAction", + "Cron", + "CronTrigger", + "CsvExportSummary", + "CustomForecastHorizon", + "CustomInferencingServer", + "CustomKeys", + "CustomKeysWorkspaceConnectionProperties", + "CustomMetricThreshold", + "CustomModelJobInput", + "CustomModelJobOutput", + "CustomMonitoringSignal", + "CustomNCrossValidations", + "CustomSeasonality", + "CustomService", + "CustomTargetLags", + "CustomTargetRollingWindowSize", + "DataCollector", + "DataContainer", + "DataContainerProperties", + "DataContainerResourceArmPaginatedResult", + "DataDriftMetricThresholdBase", + "DataDriftMonitoringSignal", + "DataFactory", + "DataImport", + "DataImportSource", + "DataLakeAnalytics", + "DataLakeAnalyticsSchema", + "DataLakeAnalyticsSchemaProperties", + "DataPathAssetReference", + "DataQualityMetricThresholdBase", + "DataQualityMonitoringSignal", + "DataVersionBase", + "DataVersionBaseProperties", + "DataVersionBaseResourceArmPaginatedResult", + "DatabaseSource", + "Databricks", + "DatabricksComputeSecrets", + "DatabricksComputeSecretsProperties", + "DatabricksProperties", + "DatabricksSchema", + "DatasetExportSummary", + "Datastore", + "DatastoreCredentials", + 
"DatastoreProperties", + "DatastoreResourceArmPaginatedResult", + "DatastoreSecrets", + "DefaultScaleSettings", + "DeploymentLogs", + "DeploymentLogsRequest", + "DeploymentResourceConfiguration", + "DiagnoseRequestProperties", + "DiagnoseResponseResult", + "DiagnoseResponseResultValue", + "DiagnoseResult", + "DiagnoseWorkspaceParameters", + "DistributionConfiguration", + "Docker", + "EarlyTerminationPolicy", + "EmailMonitoringAlertNotificationSettings", + "EncryptionKeyVaultUpdateProperties", + "EncryptionProperty", + "EncryptionUpdateProperties", + "Endpoint", + "EndpointAuthKeys", + "EndpointAuthToken", + "EndpointDeploymentPropertiesBase", + "EndpointPropertiesBase", + "EndpointScheduleAction", + "EnvironmentContainer", + "EnvironmentContainerProperties", + "EnvironmentContainerResourceArmPaginatedResult", + "EnvironmentVariable", + "EnvironmentVersion", + "EnvironmentVersionProperties", + "EnvironmentVersionResourceArmPaginatedResult", + "ErrorAdditionalInfo", + "ErrorDetail", + "ErrorResponse", + "EstimatedVMPrice", + "EstimatedVMPrices", + "ExportSummary", + "ExternalFQDNResponse", + "FQDNEndpoint", + "FQDNEndpointDetail", + "FQDNEndpoints", + "FQDNEndpointsPropertyBag", + "Feature", + "FeatureAttributionDriftMonitoringSignal", + "FeatureAttributionMetricThreshold", + "FeatureProperties", + "FeatureResourceArmPaginatedResult", + "FeatureStoreSettings", + "FeatureSubset", + "FeatureWindow", + "FeaturesetContainer", + "FeaturesetContainerProperties", + "FeaturesetContainerResourceArmPaginatedResult", + "FeaturesetSpecification", + "FeaturesetVersion", + "FeaturesetVersionBackfillRequest", + "FeaturesetVersionBackfillResponse", + "FeaturesetVersionProperties", + "FeaturesetVersionResourceArmPaginatedResult", + "FeaturestoreEntityContainer", + "FeaturestoreEntityContainerProperties", + "FeaturestoreEntityContainerResourceArmPaginatedResult", + "FeaturestoreEntityVersion", + "FeaturestoreEntityVersionProperties", + "FeaturestoreEntityVersionResourceArmPaginatedResult", + "FeaturizationSettings", + "FileSystemSource", + "FixedInputData", + "FlavorData", + "ForecastHorizon", + "Forecasting", + "ForecastingSettings", + "ForecastingTrainingSettings", + "FqdnOutboundRule", + "GenerationSafetyQualityMetricThreshold", + "GenerationSafetyQualityMonitoringSignal", + "GenerationTokenStatisticsMetricThreshold", + "GenerationTokenStatisticsSignal", + "GridSamplingAlgorithm", + "HDInsight", + "HDInsightProperties", + "HDInsightSchema", + "HdfsDatastore", + "IdAssetReference", + "IdentityConfiguration", + "IdentityForCmk", + "IdleShutdownSetting", + "Image", + "ImageClassification", + "ImageClassificationBase", + "ImageClassificationMultilabel", + "ImageInstanceSegmentation", + "ImageLimitSettings", + "ImageMetadata", + "ImageModelDistributionSettings", + "ImageModelDistributionSettingsClassification", + "ImageModelDistributionSettingsObjectDetection", + "ImageModelSettings", + "ImageModelSettingsClassification", + "ImageModelSettingsObjectDetection", + "ImageObjectDetection", + "ImageObjectDetectionBase", + "ImageSweepSettings", + "ImageVertical", + "ImportDataAction", + "IndexColumn", + "InferenceContainerProperties", + "InferencingServer", + "InstanceTypeSchema", + "InstanceTypeSchemaResources", + "IntellectualProperty", + "JobBase", + "JobBaseProperties", + "JobBaseResourceArmPaginatedResult", + "JobInput", + "JobLimits", + "JobOutput", + "JobResourceConfiguration", + "JobScheduleAction", + "JobService", + "KerberosCredentials", + "KerberosKeytabCredentials", + "KerberosKeytabSecrets", + 
"KerberosPasswordCredentials", + "KerberosPasswordSecrets", + "KeyVaultProperties", + "Kubernetes", + "KubernetesOnlineDeployment", + "KubernetesProperties", + "KubernetesSchema", + "LabelCategory", + "LabelClass", + "LabelingDataConfiguration", + "LabelingJob", + "LabelingJobImageProperties", + "LabelingJobInstructions", + "LabelingJobMediaProperties", + "LabelingJobProperties", + "LabelingJobResourceArmPaginatedResult", + "LabelingJobTextProperties", + "LakeHouseArtifact", + "ListAmlUserFeatureResult", + "ListNotebookKeysResult", + "ListStorageAccountKeysResult", + "ListUsagesResult", + "ListWorkspaceKeysResult", + "ListWorkspaceQuotas", + "LiteralJobInput", + "MLAssistConfiguration", + "MLAssistConfigurationDisabled", + "MLAssistConfigurationEnabled", + "MLFlowModelJobInput", + "MLFlowModelJobOutput", + "MLTableData", + "MLTableJobInput", + "MLTableJobOutput", + "ManagedComputeIdentity", + "ManagedIdentity", + "ManagedIdentityAuthTypeWorkspaceConnectionProperties", + "ManagedNetworkProvisionOptions", + "ManagedNetworkProvisionStatus", + "ManagedNetworkSettings", + "ManagedOnlineDeployment", + "ManagedServiceIdentity", + "MaterializationComputeResource", + "MaterializationSettings", + "MedianStoppingPolicy", + "ModelConfiguration", + "ModelContainer", + "ModelContainerProperties", + "ModelContainerResourceArmPaginatedResult", + "ModelPackageInput", + "ModelPerformanceMetricThresholdBase", + "ModelPerformanceSignal", + "ModelProfile", + "ModelVersion", + "ModelVersionProperties", + "ModelVersionResourceArmPaginatedResult", + "MonitorComputeConfigurationBase", + "MonitorComputeIdentityBase", + "MonitorDefinition", + "MonitorServerlessSparkCompute", + "MonitoringAlertNotificationSettingsBase", + "MonitoringDataSegment", + "MonitoringFeatureFilterBase", + "MonitoringInputDataBase", + "MonitoringSignalBase", + "MonitoringTarget", + "MonitoringThreshold", + "MonitoringWorkspaceConnection", + "Mpi", + "NCrossValidations", + "NlpFixedParameters", + "NlpParameterSubspace", + "NlpSweepSettings", + "NlpVertical", + "NlpVerticalFeaturizationSettings", + "NlpVerticalLimitSettings", + "NodeStateCounts", + "Nodes", + "NoneAuthTypeWorkspaceConnectionProperties", + "NoneDatastoreCredentials", + "NotebookAccessTokenResult", + "NotebookPreparationError", + "NotebookResourceInfo", + "NotificationSetting", + "NumericalDataDriftMetricThreshold", + "NumericalDataQualityMetricThreshold", + "NumericalPredictionDriftMetricThreshold", + "Objective", + "OneLakeArtifact", + "OneLakeDatastore", + "OnlineDeployment", + "OnlineDeploymentProperties", + "OnlineDeploymentTrackedResourceArmPaginatedResult", + "OnlineEndpoint", + "OnlineEndpointProperties", + "OnlineEndpointTrackedResourceArmPaginatedResult", + "OnlineInferenceConfiguration", + "OnlineRequestSettings", + "OnlineScaleSettings", + "Operation", + "OperationDisplay", + "OperationListResult", + "OsPatchingStatus", + "OutboundRule", + "OutboundRuleBasicResource", + "OutboundRuleListResult", + "OutputPathAssetReference", + "PATAuthTypeWorkspaceConnectionProperties", + "PackageInputPathBase", + "PackageInputPathId", + "PackageInputPathUrl", + "PackageInputPathVersion", + "PackageRequest", + "PackageResponse", + "PaginatedComputeResourcesList", + "PartialBatchDeployment", + "PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties", + "PartialJobBase", + "PartialJobBasePartialResource", + "PartialManagedServiceIdentity", + "PartialMinimalTrackedResource", + "PartialMinimalTrackedResourceWithIdentity", + "PartialMinimalTrackedResourceWithSku", + 
"PartialMinimalTrackedResourceWithSkuAndIdentity", + "PartialNotificationSetting", + "PartialRegistryPartialTrackedResource", + "PartialSku", + "Password", + "PendingUploadCredentialDto", + "PendingUploadRequestDto", + "PendingUploadResponseDto", + "PersonalComputeInstanceSettings", + "PipelineJob", + "PredictionDriftMetricThresholdBase", + "PredictionDriftMonitoringSignal", + "PrivateEndpoint", + "PrivateEndpointConnection", + "PrivateEndpointConnectionListResult", + "PrivateEndpointDestination", + "PrivateEndpointOutboundRule", + "PrivateEndpointResource", + "PrivateLinkResource", + "PrivateLinkResourceListResult", + "PrivateLinkServiceConnectionState", + "ProbeSettings", + "ProgressMetrics", + "PyTorch", + "QueueSettings", + "QuotaBaseProperties", + "QuotaUpdateParameters", + "RandomSamplingAlgorithm", + "Ray", + "Recurrence", + "RecurrenceSchedule", + "RecurrenceTrigger", + "RegenerateEndpointKeysRequest", + "Registry", + "RegistryListCredentialsResult", + "RegistryPartialManagedServiceIdentity", + "RegistryPrivateEndpointConnection", + "RegistryPrivateLinkServiceConnectionState", + "RegistryRegionArmDetails", + "RegistryTrackedResourceArmPaginatedResult", + "Regression", + "RegressionModelPerformanceMetricThreshold", + "RegressionTrainingSettings", + "RequestLogging", + "ResizeSchema", + "Resource", + "ResourceBase", + "ResourceConfiguration", + "ResourceId", + "ResourceName", + "ResourceQuota", + "Route", + "SASAuthTypeWorkspaceConnectionProperties", + "SASCredentialDto", + "SamplingAlgorithm", + "SasDatastoreCredentials", + "SasDatastoreSecrets", + "ScaleSettings", + "ScaleSettingsInformation", + "Schedule", + "ScheduleActionBase", + "ScheduleBase", + "ScheduleProperties", + "ScheduleResourceArmPaginatedResult", + "ScriptReference", + "ScriptsToExecute", + "Seasonality", + "SecretConfiguration", + "ServerlessComputeSettings", + "ServerlessEndpoint", + "ServerlessEndpointProperties", + "ServerlessEndpointTrackedResourceArmPaginatedResult", + "ServiceManagedResourcesSettings", + "ServicePrincipalAuthTypeWorkspaceConnectionProperties", + "ServicePrincipalDatastoreCredentials", + "ServicePrincipalDatastoreSecrets", + "ServiceTagDestination", + "ServiceTagOutboundRule", + "SetupScripts", + "SharedPrivateLinkResource", + "Sku", + "SkuCapacity", + "SkuResource", + "SkuResourceArmPaginatedResult", + "SkuSetting", + "SparkJob", + "SparkJobEntry", + "SparkJobPythonEntry", + "SparkJobScalaEntry", + "SparkResourceConfiguration", + "SslConfiguration", + "StackEnsembleSettings", + "StaticInputData", + "StatusMessage", + "StorageAccountDetails", + "SweepJob", + "SweepJobLimits", + "SynapseSpark", + "SynapseSparkProperties", + "SystemCreatedAcrAccount", + "SystemCreatedStorageAccount", + "SystemData", + "SystemService", + "TableFixedParameters", + "TableParameterSubspace", + "TableSweepSettings", + "TableVertical", + "TableVerticalFeaturizationSettings", + "TableVerticalLimitSettings", + "TargetLags", + "TargetRollingWindowSize", + "TargetUtilizationScaleSettings", + "TensorFlow", + "TextClassification", + "TextClassificationMultilabel", + "TextNer", + "TmpfsOptions", + "TopNFeaturesByAttribution", + "TrackedResource", + "TrailingInputData", + "TrainingSettings", + "TrialComponent", + "TriggerBase", + "TritonInferencingServer", + "TritonModelJobInput", + "TritonModelJobOutput", + "TruncationSelectionPolicy", + "UpdateWorkspaceQuotas", + "UpdateWorkspaceQuotasResult", + "UriFileDataVersion", + "UriFileJobInput", + "UriFileJobOutput", + "UriFolderDataVersion", + "UriFolderJobInput", + 
"UriFolderJobOutput", + "Usage", + "UsageName", + "UserAccountCredentials", + "UserAssignedIdentity", + "UserCreatedAcrAccount", + "UserCreatedStorageAccount", + "UserIdentity", + "UsernamePasswordAuthTypeWorkspaceConnectionProperties", + "VirtualMachine", + "VirtualMachineImage", + "VirtualMachineSchema", + "VirtualMachineSchemaProperties", + "VirtualMachineSecrets", + "VirtualMachineSecretsSchema", + "VirtualMachineSize", + "VirtualMachineSizeListResult", + "VirtualMachineSshCredentials", + "VolumeDefinition", + "VolumeOptions", + "Webhook", + "Workspace", + "WorkspaceConnectionAccessKey", + "WorkspaceConnectionApiKey", + "WorkspaceConnectionManagedIdentity", + "WorkspaceConnectionPersonalAccessToken", + "WorkspaceConnectionPropertiesV2", + "WorkspaceConnectionPropertiesV2BasicResource", + "WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", + "WorkspaceConnectionServicePrincipal", + "WorkspaceConnectionSharedAccessSignature", + "WorkspaceConnectionUpdateParameter", + "WorkspaceConnectionUsernamePassword", + "WorkspaceHubConfig", + "WorkspaceListResult", + "WorkspacePrivateEndpointResource", + "WorkspaceUpdateParameters", + "ActionType", + "AllocationState", + "ApplicationSharingPolicy", + "AssetProvisioningState", + "AutoDeleteCondition", + "AutoRebuildSetting", + "Autosave", + "BaseEnvironmentSourceType", + "BatchDeploymentConfigurationType", + "BatchLoggingLevel", + "BatchOutputAction", + "BillingCurrency", + "BlockedTransformers", + "Caching", + "CategoricalDataDriftMetric", + "CategoricalDataQualityMetric", + "CategoricalPredictionDriftMetric", + "ClassificationModelPerformanceMetric", + "ClassificationModels", + "ClassificationMultilabelPrimaryMetrics", + "ClassificationPrimaryMetrics", + "ClusterPurpose", + "ComputeInstanceAuthorizationType", + "ComputeInstanceState", + "ComputePowerAction", + "ComputeType", + "ConnectionAuthType", + "ConnectionCategory", + "ContainerType", + "CreatedByType", + "CredentialsType", + "DataAvailabilityStatus", + "DataCollectionMode", + "DataImportSourceType", + "DataType", + "DatastoreType", + "DeploymentProvisioningState", + "DiagnoseResultLevel", + "DistributionType", + "EarlyTerminationPolicyType", + "EgressPublicNetworkAccessType", + "EmailNotificationEnableType", + "EncryptionStatus", + "EndpointAuthMode", + "EndpointComputeType", + "EndpointProvisioningState", + "EndpointServiceConnectionStatus", + "EnvironmentType", + "EnvironmentVariableType", + "ExportFormatType", + "FeatureAttributionMetric", + "FeatureDataType", + "FeatureLags", + "FeaturizationMode", + "ForecastHorizonMode", + "ForecastingModels", + "ForecastingPrimaryMetrics", + "GenerationSafetyQualityMetric", + "GenerationTokenStatisticsMetric", + "Goal", + "IdentityConfigurationType", + "ImageAnnotationType", + "ImageType", + "IncrementalDataRefresh", + "InferencingServerType", + "InputDeliveryMode", + "InputPathType", + "InstanceSegmentationPrimaryMetrics", + "IsolationMode", + "JobInputType", + "JobLimitsType", + "JobOutputType", + "JobProvisioningState", + "JobStatus", + "JobTier", + "JobType", + "KeyType", + "LearningRateScheduler", + "ListViewType", + "LoadBalancerType", + "LogTrainingMetrics", + "LogValidationLoss", + "LogVerbosity", + "MLAssistConfigurationType", + "MLFlowAutologgerState", + "ManagedNetworkStatus", + "ManagedServiceIdentityType", + "MaterializationStoreType", + "MediaType", + "MlflowAutologger", + "ModelSize", + "ModelTaskType", + "MonitorComputeIdentityType", + "MonitorComputeType", + "MonitoringAlertNotificationType", + 
"MonitoringFeatureDataType", + "MonitoringFeatureFilterType", + "MonitoringInputDataType", + "MonitoringModelType", + "MonitoringNotificationMode", + "MonitoringSignalType", + "MountAction", + "MountState", + "MultiSelect", + "NCrossValidationsMode", + "Network", + "NlpLearningRateScheduler", + "NodeState", + "NodesValueType", + "NumericalDataDriftMetric", + "NumericalDataQualityMetric", + "NumericalPredictionDriftMetric", + "ObjectDetectionPrimaryMetrics", + "OneLakeArtifactType", + "OperatingSystemType", + "OperationName", + "OperationStatus", + "OperationTrigger", + "OrderString", + "Origin", + "OsType", + "OutputDeliveryMode", + "PackageBuildState", + "PackageInputDeliveryMode", + "PackageInputType", + "PatchStatus", + "PendingUploadCredentialType", + "PendingUploadType", + "PrivateEndpointConnectionProvisioningState", + "ProtectionLevel", + "Protocol", + "ProvisioningState", + "ProvisioningStatus", + "PublicNetworkAccessType", + "QuotaUnit", + "RandomSamplingAlgorithmRule", + "RecurrenceFrequency", + "ReferenceType", + "RegressionModelPerformanceMetric", + "RegressionModels", + "RegressionPrimaryMetrics", + "RemoteLoginPortPublicAccess", + "RollingRateType", + "RuleAction", + "RuleCategory", + "RuleStatus", + "RuleType", + "SamplingAlgorithmType", + "ScaleType", + "ScheduleActionType", + "ScheduleListViewType", + "ScheduleProvisioningState", + "ScheduleProvisioningStatus", + "ScheduleStatus", + "SeasonalityMode", + "SecretsType", + "ServiceDataAccessAuthIdentity", + "ShortSeriesHandlingConfiguration", + "SkuScaleType", + "SkuTier", + "SourceType", + "SparkJobEntryType", + "SshPublicAccess", + "SslConfigStatus", + "StackMetaLearnerType", + "Status", + "StatusMessageLevel", + "StochasticOptimizer", + "StorageAccountType", + "TargetAggregationFunction", + "TargetLagsMode", + "TargetRollingWindowSizeMode", + "TaskType", + "TextAnnotationType", + "TrainingMode", + "TriggerType", + "UnderlyingResourceAction", + "UnitOfMeasure", + "UsageUnit", + "UseStl", + "VMPriceOSType", + "VMTier", + "ValidationMetricType", + "VmPriority", + "VolumeDefinitionType", + "WebhookType", + "WeekDay", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_azure_machine_learning_services_enums.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_azure_machine_learning_services_enums.py new file mode 100644 index 000000000000..8c3ed339cf4d --- /dev/null +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_azure_machine_learning_services_enums.py @@ -0,0 +1,2060 @@ +# coding=utf-8 +# -------------------------------------------------------------------------- +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. See License.txt in the project root for license information. +# Code generated by Microsoft (R) AutoRest Code Generator. +# Changes may cause incorrect behavior and will be lost if the code is regenerated. +# -------------------------------------------------------------------------- + +from enum import Enum + +from azure.core import CaseInsensitiveEnumMeta + + +class ActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum. Indicates the action type. "Internal" refers to actions that are for internal only APIs.""" + + INTERNAL = "Internal" + + +class AllocationState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Allocation state of the compute. 
Possible values are: steady - Indicates that the compute is + not resizing. There are no changes to the number of compute nodes in the compute in progress. A + compute enters this state when it is created and when no operations are being performed on the + compute to change the number of compute nodes. resizing - Indicates that the compute is + resizing; that is, compute nodes are being added to or removed from the compute. + """ + + STEADY = "Steady" + RESIZING = "Resizing" + + +class ApplicationSharingPolicy(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Policy for sharing applications on this compute instance among users of parent workspace. If + Personal, only the creator can access applications on this compute instance. When Shared, any + workspace user can access applications on this instance depending on his/her assigned role. + """ + + PERSONAL = "Personal" + SHARED = "Shared" + + +class AssetProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Provisioning state of registry asset.""" + + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + CREATING = "Creating" + UPDATING = "Updating" + DELETING = "Deleting" + + +class AutoDeleteCondition(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AutoDeleteCondition.""" + + CREATED_GREATER_THAN = "CreatedGreaterThan" + LAST_ACCESSED_GREATER_THAN = "LastAccessedGreaterThan" + + +class AutoRebuildSetting(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AutoRebuild setting for the derived image.""" + + DISABLED = "Disabled" + ON_BASE_IMAGE_UPDATE = "OnBaseImageUpdate" + + +class Autosave(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Auto save settings.""" + + NONE = "None" + LOCAL = "Local" + REMOTE = "Remote" + + +class BaseEnvironmentSourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Base environment type.""" + + ENVIRONMENT_ASSET = "EnvironmentAsset" + + +class BatchDeploymentConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The enumerated property types for batch deployments.""" + + MODEL = "Model" + PIPELINE_COMPONENT = "PipelineComponent" + + +class BatchLoggingLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Log verbosity for batch inferencing. + Increasing verbosity order for logging is : Warning, Info and Debug. + The default value is Info. + """ + + INFO = "Info" + WARNING = "Warning" + DEBUG = "Debug" + + +class BatchOutputAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine how batch inferencing will handle output.""" + + SUMMARY_ONLY = "SummaryOnly" + APPEND_ROW = "AppendRow" + + +class BillingCurrency(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Three lettered code specifying the currency of the VM price. Example: USD.""" + + USD = "USD" + + +class BlockedTransformers(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum for all classification models supported by AutoML.""" + + TEXT_TARGET_ENCODER = "TextTargetEncoder" + """Target encoding for text data.""" + ONE_HOT_ENCODER = "OneHotEncoder" + """Ohe hot encoding creates a binary feature transformation.""" + CAT_TARGET_ENCODER = "CatTargetEncoder" + """Target encoding for categorical data.""" + TF_IDF = "TfIdf" + """Tf-Idf stands for, term-frequency times inverse document-frequency. This is a common term + #: weighting scheme for identifying information from documents.""" + WO_E_TARGET_ENCODER = "WoETargetEncoder" + """Weight of Evidence encoding is a technique used to encode categorical variables. 
It uses the + #: natural log of the P(1)/P(0) to create weights.""" + LABEL_ENCODER = "LabelEncoder" + """Label encoder converts labels/categorical variables in a numerical form.""" + WORD_EMBEDDING = "WordEmbedding" + """Word embedding helps represents words or phrases as a vector, or a series of numbers.""" + NAIVE_BAYES = "NaiveBayes" + """Naive Bayes is a classified that is used for classification of discrete features that are + #: categorically distributed.""" + COUNT_VECTORIZER = "CountVectorizer" + """Count Vectorizer converts a collection of text documents to a matrix of token counts.""" + HASH_ONE_HOT_ENCODER = "HashOneHotEncoder" + """Hashing One Hot Encoder can turn categorical variables into a limited number of new features. + #: This is often used for high-cardinality categorical features.""" + + +class Caching(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Caching type of Data Disk.""" + + NONE = "None" + READ_ONLY = "ReadOnly" + READ_WRITE = "ReadWrite" + + +class CategoricalDataDriftMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """CategoricalDataDriftMetric.""" + + JENSEN_SHANNON_DISTANCE = "JensenShannonDistance" + """The Jensen Shannon Distance (JSD) metric.""" + POPULATION_STABILITY_INDEX = "PopulationStabilityIndex" + """The Population Stability Index (PSI) metric.""" + PEARSONS_CHI_SQUARED_TEST = "PearsonsChiSquaredTest" + """The Pearsons Chi Squared Test metric.""" + + +class CategoricalDataQualityMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """CategoricalDataQualityMetric.""" + + NULL_VALUE_RATE = "NullValueRate" + """Calculates the rate of null values.""" + DATA_TYPE_ERROR_RATE = "DataTypeErrorRate" + """Calculates the rate of data type errors.""" + OUT_OF_BOUNDS_RATE = "OutOfBoundsRate" + """Calculates the rate values are out of bounds.""" + + +class CategoricalPredictionDriftMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """CategoricalPredictionDriftMetric.""" + + JENSEN_SHANNON_DISTANCE = "JensenShannonDistance" + """The Jensen Shannon Distance (JSD) metric.""" + POPULATION_STABILITY_INDEX = "PopulationStabilityIndex" + """The Population Stability Index (PSI) metric.""" + PEARSONS_CHI_SQUARED_TEST = "PearsonsChiSquaredTest" + """The Pearsons Chi Squared Test metric.""" + + +class ClassificationModelPerformanceMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ClassificationModelPerformanceMetric.""" + + ACCURACY = "Accuracy" + """Calculates the accuracy of the model predictions.""" + PRECISION = "Precision" + """Calculates the precision of the model predictions.""" + RECALL = "Recall" + """Calculates the recall of the model predictions.""" + + +class ClassificationModels(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum for all classification models supported by AutoML.""" + + LOGISTIC_REGRESSION = "LogisticRegression" + """Logistic regression is a fundamental classification technique. + #: It belongs to the group of linear classifiers and is somewhat similar to polynomial and linear + #: regression. + #: Logistic regression is fast and relatively uncomplicated, and it's convenient for you to + #: interpret the results. 
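# Sketch: because the members are plain strings, they can be gathered into request
# payloads (for example, a list of blocked transformers for AutoML featurization)
# without extra conversion. Assumes the regenerated models package is importable;
# the payload shape is illustrative only.
from azure.ai.ml._restclient.v2023_08_01_preview.models import BlockedTransformers

blocked = [BlockedTransformers.TF_IDF, BlockedTransformers.WORD_EMBEDDING]
print([t.value for t in blocked])  # ['TfIdf', 'WordEmbedding']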
+ #: Although it's essentially a method for binary classification, it can also be applied to + #: multiclass problems.""" + SGD = "SGD" + """SGD: Stochastic gradient descent is an optimization algorithm often used in machine learning + #: applications + #: to find the model parameters that correspond to the best fit between predicted and actual + #: outputs.""" + MULTINOMIAL_NAIVE_BAYES = "MultinomialNaiveBayes" + """The multinomial Naive Bayes classifier is suitable for classification with discrete features + #: (e.g., word counts for text classification). + #: The multinomial distribution normally requires integer feature counts. However, in practice, + #: fractional counts such as tf-idf may also work.""" + BERNOULLI_NAIVE_BAYES = "BernoulliNaiveBayes" + """Naive Bayes classifier for multivariate Bernoulli models.""" + SVM = "SVM" + """A support vector machine (SVM) is a supervised machine learning model that uses classification + #: algorithms for two-group classification problems. + #: After giving an SVM model sets of labeled training data for each category, they're able to + #: categorize new text.""" + LINEAR_SVM = "LinearSVM" + """A support vector machine (SVM) is a supervised machine learning model that uses classification + #: algorithms for two-group classification problems. + #: After giving an SVM model sets of labeled training data for each category, they're able to + #: categorize new text. + #: Linear SVM performs best when input data is linear, i.e., data can be easily classified by + #: drawing the straight line between classified values on a plotted graph.""" + KNN = "KNN" + """K-nearest neighbors (KNN) algorithm uses 'feature similarity' to predict the values of new + #: datapoints + #: which further means that the new data point will be assigned a value based on how closely it + #: matches the points in the training set.""" + DECISION_TREE = "DecisionTree" + """Decision Trees are a non-parametric supervised learning method used for both classification and + #: regression tasks. + #: The goal is to create a model that predicts the value of a target variable by learning simple + #: decision rules inferred from the data features.""" + RANDOM_FOREST = "RandomForest" + """Random forest is a supervised learning algorithm. + #: The "forest" it builds, is an ensemble of decision trees, usually trained with the bagging + #: method. + #: The general idea of the bagging method is that a combination of learning models increases the + #: overall result.""" + EXTREME_RANDOM_TREES = "ExtremeRandomTrees" + """Extreme Trees is an ensemble machine learning algorithm that combines the predictions from many + #: decision trees. It is related to the widely used random forest algorithm.""" + LIGHT_GBM = "LightGBM" + """LightGBM is a gradient boosting framework that uses tree based learning algorithms.""" + GRADIENT_BOOSTING = "GradientBoosting" + """The technique of transiting week learners into a strong learner is called Boosting. The + #: gradient boosting algorithm process works on this theory of execution.""" + XG_BOOST_CLASSIFIER = "XGBoostClassifier" + """XGBoost: Extreme Gradient Boosting Algorithm. This algorithm is used for structured data where + #: target column values can be divided into distinct class values.""" + + +class ClassificationMultilabelPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for classification multilabel tasks.""" + + AUC_WEIGHTED = "AUCWeighted" + """AUC is the Area under the curve. 
+ #: This metric represents arithmetic mean of the score for each class, + #: weighted by the number of true instances in each class.""" + ACCURACY = "Accuracy" + """Accuracy is the ratio of predictions that exactly match the true class labels.""" + NORM_MACRO_RECALL = "NormMacroRecall" + """Normalized macro recall is recall macro-averaged and normalized, so that random + #: performance has a score of 0, and perfect performance has a score of 1.""" + AVERAGE_PRECISION_SCORE_WEIGHTED = "AveragePrecisionScoreWeighted" + """The arithmetic mean of the average precision score for each class, weighted by + #: the number of true instances in each class.""" + PRECISION_SCORE_WEIGHTED = "PrecisionScoreWeighted" + """The arithmetic mean of precision for each class, weighted by number of true instances in each + #: class.""" + IOU = "IOU" + """Intersection Over Union. Intersection of predictions divided by union of predictions.""" + + +class ClassificationPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for classification tasks.""" + + AUC_WEIGHTED = "AUCWeighted" + """AUC is the Area under the curve. + #: This metric represents arithmetic mean of the score for each class, + #: weighted by the number of true instances in each class.""" + ACCURACY = "Accuracy" + """Accuracy is the ratio of predictions that exactly match the true class labels.""" + NORM_MACRO_RECALL = "NormMacroRecall" + """Normalized macro recall is recall macro-averaged and normalized, so that random + #: performance has a score of 0, and perfect performance has a score of 1.""" + AVERAGE_PRECISION_SCORE_WEIGHTED = "AveragePrecisionScoreWeighted" + """The arithmetic mean of the average precision score for each class, weighted by + #: the number of true instances in each class.""" + PRECISION_SCORE_WEIGHTED = "PrecisionScoreWeighted" + """The arithmetic mean of precision for each class, weighted by number of true instances in each + #: class.""" + + +class ClusterPurpose(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Intended usage of the cluster.""" + + FAST_PROD = "FastProd" + DENSE_PROD = "DenseProd" + DEV_TEST = "DevTest" + + +class ComputeInstanceAuthorizationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The Compute Instance Authorization type. 
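# Sketch comparing the two classification primary-metric enums defined above; the
# multilabel variant adds IOU on top of the single-label metrics. Assumes the
# regenerated models package is importable.
from azure.ai.ml._restclient.v2023_08_01_preview.models import (
    ClassificationMultilabelPrimaryMetrics,
    ClassificationPrimaryMetrics,
)

multilabel_only = {m.value for m in ClassificationMultilabelPrimaryMetrics} - {
    m.value for m in ClassificationPrimaryMetrics
}
print(multilabel_only)  # {'IOU'}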
Available values are personal (default).""" + + PERSONAL = "personal" + + +class ComputeInstanceState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Current state of an ComputeInstance.""" + + CREATING = "Creating" + CREATE_FAILED = "CreateFailed" + DELETING = "Deleting" + RUNNING = "Running" + RESTARTING = "Restarting" + RESIZING = "Resizing" + JOB_RUNNING = "JobRunning" + SETTING_UP = "SettingUp" + SETUP_FAILED = "SetupFailed" + STARTING = "Starting" + STOPPED = "Stopped" + STOPPING = "Stopping" + USER_SETTING_UP = "UserSettingUp" + USER_SETUP_FAILED = "UserSetupFailed" + UNKNOWN = "Unknown" + UNUSABLE = "Unusable" + + +class ComputePowerAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """[Required] The compute power action.""" + + START = "Start" + STOP = "Stop" + + +class ComputeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of compute.""" + + AKS = "AKS" + KUBERNETES = "Kubernetes" + AML_COMPUTE = "AmlCompute" + COMPUTE_INSTANCE = "ComputeInstance" + DATA_FACTORY = "DataFactory" + VIRTUAL_MACHINE = "VirtualMachine" + HD_INSIGHT = "HDInsight" + DATABRICKS = "Databricks" + DATA_LAKE_ANALYTICS = "DataLakeAnalytics" + SYNAPSE_SPARK = "SynapseSpark" + + +class ConnectionAuthType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Authentication type of the connection target.""" + + PAT = "PAT" + MANAGED_IDENTITY = "ManagedIdentity" + USERNAME_PASSWORD = "UsernamePassword" + NONE = "None" + SAS = "SAS" + SERVICE_PRINCIPAL = "ServicePrincipal" + ACCESS_KEY = "AccessKey" + API_KEY = "ApiKey" + CUSTOM_KEYS = "CustomKeys" + + +class ConnectionCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Category of the connection.""" + + PYTHON_FEED = "PythonFeed" + CONTAINER_REGISTRY = "ContainerRegistry" + GIT = "Git" + S3 = "S3" + SNOWFLAKE = "Snowflake" + AZURE_SQL_DB = "AzureSqlDb" + AZURE_SYNAPSE_ANALYTICS = "AzureSynapseAnalytics" + AZURE_MY_SQL_DB = "AzureMySqlDb" + AZURE_POSTGRES_DB = "AzurePostgresDb" + ADLS_GEN2 = "ADLSGen2" + REDIS = "Redis" + API_KEY = "ApiKey" + AZURE_OPEN_AI = "AzureOpenAI" + COGNITIVE_SEARCH = "CognitiveSearch" + COGNITIVE_SERVICE = "CognitiveService" + CUSTOM_KEYS = "CustomKeys" + + +class ContainerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of container to retrieve logs from.""" + + STORAGE_INITIALIZER = "StorageInitializer" + """The container used to download models and score script.""" + INFERENCE_SERVER = "InferenceServer" + """The container used to serve user's request.""" + MODEL_DATA_COLLECTOR = "ModelDataCollector" + """The container used to collect payload and custom logging when mdc is enabled.""" + + +class CreatedByType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of identity that created the resource.""" + + USER = "User" + APPLICATION = "Application" + MANAGED_IDENTITY = "ManagedIdentity" + KEY = "Key" + + +class CredentialsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the datastore credentials type.""" + + ACCOUNT_KEY = "AccountKey" + CERTIFICATE = "Certificate" + NONE = "None" + SAS = "Sas" + SERVICE_PRINCIPAL = "ServicePrincipal" + KERBEROS_KEYTAB = "KerberosKeytab" + KERBEROS_PASSWORD = "KerberosPassword" + + +class DataAvailabilityStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """DataAvailabilityStatus.""" + + NONE = "None" + PENDING = "Pending" + INCOMPLETE = "Incomplete" + COMPLETE = "Complete" + + +class DataCollectionMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """DataCollectionMode.""" + + ENABLED = "Enabled" + DISABLED = 
"Disabled" + + +class DataImportSourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the type of data.""" + + DATABASE = "database" + FILE_SYSTEM = "file_system" + + +class DatastoreType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the datastore contents type.""" + + AZURE_BLOB = "AzureBlob" + AZURE_DATA_LAKE_GEN1 = "AzureDataLakeGen1" + AZURE_DATA_LAKE_GEN2 = "AzureDataLakeGen2" + AZURE_FILE = "AzureFile" + HDFS = "Hdfs" + ONE_LAKE = "OneLake" + + +class DataType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the type of data.""" + + URI_FILE = "uri_file" + URI_FOLDER = "uri_folder" + MLTABLE = "mltable" + + +class DeploymentProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Possible values for DeploymentProvisioningState.""" + + CREATING = "Creating" + DELETING = "Deleting" + SCALING = "Scaling" + UPDATING = "Updating" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + + +class DiagnoseResultLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Level of workspace setup error.""" + + WARNING = "Warning" + ERROR = "Error" + INFORMATION = "Information" + + +class DistributionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the job distribution type.""" + + PY_TORCH = "PyTorch" + TENSOR_FLOW = "TensorFlow" + MPI = "Mpi" + RAY = "Ray" + + +class EarlyTerminationPolicyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """EarlyTerminationPolicyType.""" + + BANDIT = "Bandit" + MEDIAN_STOPPING = "MedianStopping" + TRUNCATION_SELECTION = "TruncationSelection" + + +class EgressPublicNetworkAccessType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine whether PublicNetworkAccess is Enabled or Disabled for egress of a + deployment. 
+ """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class EmailNotificationEnableType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the email notification type.""" + + JOB_COMPLETED = "JobCompleted" + JOB_FAILED = "JobFailed" + JOB_CANCELLED = "JobCancelled" + + +class EncryptionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Indicates whether or not the encryption is enabled for the workspace.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class EndpointAuthMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine endpoint authentication mode.""" + + AML_TOKEN = "AMLToken" + KEY = "Key" + AAD_TOKEN = "AADToken" + + +class EndpointComputeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine endpoint compute type.""" + + MANAGED = "Managed" + KUBERNETES = "Kubernetes" + AZURE_ML_COMPUTE = "AzureMLCompute" + + +class EndpointProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of endpoint provisioning.""" + + CREATING = "Creating" + DELETING = "Deleting" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + UPDATING = "Updating" + CANCELED = "Canceled" + + +class EndpointServiceConnectionStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Connection status of the service consumer with the service provider.""" + + APPROVED = "Approved" + PENDING = "Pending" + REJECTED = "Rejected" + DISCONNECTED = "Disconnected" + TIMEOUT = "Timeout" + + +class EnvironmentType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Environment type is either user created or curated by Azure ML service.""" + + CURATED = "Curated" + USER_CREATED = "UserCreated" + + +class EnvironmentVariableType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the Environment Variable. Possible values are: local - For local variable.""" + + LOCAL = "local" + + +class ExportFormatType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The format of exported labels.""" + + DATASET = "Dataset" + COCO = "Coco" + CSV = "CSV" + + +class FeatureAttributionMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """FeatureAttributionMetric.""" + + NORMALIZED_DISCOUNTED_CUMULATIVE_GAIN = "NormalizedDiscountedCumulativeGain" + """The Normalized Discounted Cumulative Gain metric.""" + + +class FeatureDataType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """FeatureDataType.""" + + STRING = "String" + INTEGER = "Integer" + LONG = "Long" + FLOAT = "Float" + DOUBLE = "Double" + BINARY = "Binary" + DATETIME = "Datetime" + BOOLEAN = "Boolean" + + +class FeatureLags(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Flag for generating lags for the numeric features.""" + + NONE = "None" + """No feature lags generated.""" + AUTO = "Auto" + """System auto-generates feature lags.""" + + +class FeaturizationMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Featurization mode - determines data featurization mode.""" + + AUTO = "Auto" + """Auto mode, system performs featurization without any custom featurization inputs.""" + CUSTOM = "Custom" + """Custom featurization.""" + OFF = "Off" + """Featurization off. 
'Forecasting' task cannot use this value.""" + + +class ForecastHorizonMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine forecast horizon selection mode.""" + + AUTO = "Auto" + """Forecast horizon to be determined automatically.""" + CUSTOM = "Custom" + """Use the custom forecast horizon.""" + + +class ForecastingModels(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum for all forecasting models supported by AutoML.""" + + AUTO_ARIMA = "AutoArima" + """Auto-Autoregressive Integrated Moving Average (ARIMA) model uses time-series data and + #: statistical analysis to interpret the data and make future predictions. + #: This model aims to explain data by using time series data on its past values and uses linear + #: regression to make predictions.""" + PROPHET = "Prophet" + """Prophet is a procedure for forecasting time series data based on an additive model where + #: non-linear trends are fit with yearly, weekly, and daily seasonality, plus holiday effects. + #: It works best with time series that have strong seasonal effects and several seasons of + #: historical data. Prophet is robust to missing data and shifts in the trend, and typically + #: handles outliers well.""" + NAIVE = "Naive" + """The Naive forecasting model makes predictions by carrying forward the latest target value for + #: each time-series in the training data.""" + SEASONAL_NAIVE = "SeasonalNaive" + """The Seasonal Naive forecasting model makes predictions by carrying forward the latest season of + #: target values for each time-series in the training data.""" + AVERAGE = "Average" + """The Average forecasting model makes predictions by carrying forward the average of the target + #: values for each time-series in the training data.""" + SEASONAL_AVERAGE = "SeasonalAverage" + """The Seasonal Average forecasting model makes predictions by carrying forward the average value + #: of the latest season of data for each time-series in the training data.""" + EXPONENTIAL_SMOOTHING = "ExponentialSmoothing" + """Exponential smoothing is a time series forecasting method for univariate data that can be + #: extended to support data with a systematic trend or seasonal component.""" + ARIMAX = "Arimax" + """An Autoregressive Integrated Moving Average with Explanatory Variable (ARIMAX) model can be + #: viewed as a multiple regression model with one or more autoregressive (AR) terms and/or one or + #: more moving average (MA) terms. + #: This method is suitable for forecasting when data is stationary/non stationary, and + #: multivariate with any type of data pattern, i.e., level/trend /seasonality/cyclicity.""" + TCN_FORECASTER = "TCNForecaster" + """TCNForecaster: Temporal Convolutional Networks Forecaster. //TODO: Ask forecasting team for + #: brief intro.""" + ELASTIC_NET = "ElasticNet" + """Elastic net is a popular type of regularized linear regression that combines two popular + #: penalties, specifically the L1 and L2 penalty functions.""" + GRADIENT_BOOSTING = "GradientBoosting" + """The technique of transiting week learners into a strong learner is called Boosting. The + #: gradient boosting algorithm process works on this theory of execution.""" + DECISION_TREE = "DecisionTree" + """Decision Trees are a non-parametric supervised learning method used for both classification and + #: regression tasks. 
+ #: The goal is to create a model that predicts the value of a target variable by learning simple + #: decision rules inferred from the data features.""" + KNN = "KNN" + """K-nearest neighbors (KNN) algorithm uses 'feature similarity' to predict the values of new + #: datapoints + #: which further means that the new data point will be assigned a value based on how closely it + #: matches the points in the training set.""" + LASSO_LARS = "LassoLars" + """Lasso model fit with Least Angle Regression a.k.a. Lars. It is a Linear Model trained with an + #: L1 prior as regularizer.""" + SGD = "SGD" + """SGD: Stochastic gradient descent is an optimization algorithm often used in machine learning + #: applications + #: to find the model parameters that correspond to the best fit between predicted and actual + #: outputs. + #: It's an inexact but powerful technique.""" + RANDOM_FOREST = "RandomForest" + """Random forest is a supervised learning algorithm. + #: The "forest" it builds, is an ensemble of decision trees, usually trained with the bagging + #: method. + #: The general idea of the bagging method is that a combination of learning models increases the + #: overall result.""" + EXTREME_RANDOM_TREES = "ExtremeRandomTrees" + """Extreme Trees is an ensemble machine learning algorithm that combines the predictions from many + #: decision trees. It is related to the widely used random forest algorithm.""" + LIGHT_GBM = "LightGBM" + """LightGBM is a gradient boosting framework that uses tree based learning algorithms.""" + XG_BOOST_REGRESSOR = "XGBoostRegressor" + """XGBoostRegressor: Extreme Gradient Boosting Regressor is a supervised machine learning model + #: using ensemble of base learners.""" + + +class ForecastingPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for Forecasting task.""" + + SPEARMAN_CORRELATION = "SpearmanCorrelation" + """The Spearman's rank coefficient of correlation is a non-parametric measure of rank correlation.""" + NORMALIZED_ROOT_MEAN_SQUARED_ERROR = "NormalizedRootMeanSquaredError" + """The Normalized Root Mean Squared Error (NRMSE) the RMSE facilitates the comparison between + #: models with different scales.""" + R2_SCORE = "R2Score" + """The R2 score is one of the performance evaluation measures for forecasting-based machine + #: learning models.""" + NORMALIZED_MEAN_ABSOLUTE_ERROR = "NormalizedMeanAbsoluteError" + """The Normalized Mean Absolute Error (NMAE) is a validation metric to compare the Mean Absolute + #: Error (MAE) of (time) series with different scales.""" + + +class GenerationSafetyQualityMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Generation safety quality metric enum.""" + + ACCEPTABLE_GROUNDEDNESS_SCORE_PER_INSTANCE = "AcceptableGroundednessScorePerInstance" + AGGREGATED_GROUNDEDNESS_PASS_RATE = "AggregatedGroundednessPassRate" + ACCEPTABLE_COHERENCE_SCORE_PER_INSTANCE = "AcceptableCoherenceScorePerInstance" + AGGREGATED_COHERENCE_PASS_RATE = "AggregatedCoherencePassRate" + ACCEPTABLE_FLUENCY_SCORE_PER_INSTANCE = "AcceptableFluencyScorePerInstance" + AGGREGATED_FLUENCY_PASS_RATE = "AggregatedFluencyPassRate" + ACCEPTABLE_SIMILARITY_SCORE_PER_INSTANCE = "AcceptableSimilarityScorePerInstance" + AGGREGATED_SIMILARITY_PASS_RATE = "AggregatedSimilarityPassRate" + ACCEPTABLE_RELEVANCE_SCORE_PER_INSTANCE = "AcceptableRelevanceScorePerInstance" + AGGREGATED_RELEVANCE_PASS_RATE = "AggregatedRelevancePassRate" + + +class GenerationTokenStatisticsMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + 
"""Generation token statistics metric enum.""" + + TOTAL_TOKEN_COUNT = "TotalTokenCount" + TOTAL_TOKEN_COUNT_PER_GROUP = "TotalTokenCountPerGroup" + + +class Goal(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Defines supported metric goals for hyperparameter tuning.""" + + MINIMIZE = "Minimize" + MAXIMIZE = "Maximize" + + +class IdentityConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine identity framework.""" + + MANAGED = "Managed" + AML_TOKEN = "AMLToken" + USER_IDENTITY = "UserIdentity" + + +class ImageAnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Annotation type of image data.""" + + CLASSIFICATION = "Classification" + BOUNDING_BOX = "BoundingBox" + INSTANCE_SEGMENTATION = "InstanceSegmentation" + + +class ImageType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the image. Possible values are: docker - For docker images. azureml - For AzureML + images. + """ + + DOCKER = "docker" + AZUREML = "azureml" + + +class IncrementalDataRefresh(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Whether IncrementalDataRefresh is enabled.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class InferencingServerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Inferencing server type for various targets.""" + + AZURE_ML_ONLINE = "AzureMLOnline" + AZURE_ML_BATCH = "AzureMLBatch" + TRITON = "Triton" + CUSTOM = "Custom" + + +class InputDeliveryMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the input data delivery mode.""" + + READ_ONLY_MOUNT = "ReadOnlyMount" + READ_WRITE_MOUNT = "ReadWriteMount" + DOWNLOAD = "Download" + DIRECT = "Direct" + EVAL_MOUNT = "EvalMount" + EVAL_DOWNLOAD = "EvalDownload" + + +class InputPathType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Input path type for package inputs.""" + + URL = "Url" + PATH_ID = "PathId" + PATH_VERSION = "PathVersion" + + +class InstanceSegmentationPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for InstanceSegmentation tasks.""" + + MEAN_AVERAGE_PRECISION = "MeanAveragePrecision" + """Mean Average Precision (MAP) is the average of AP (Average Precision). 
+ #: AP is calculated for each class and averaged to get the MAP.""" + + +class IsolationMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Isolation mode for the managed network of a machine learning workspace.""" + + DISABLED = "Disabled" + ALLOW_INTERNET_OUTBOUND = "AllowInternetOutbound" + ALLOW_ONLY_APPROVED_OUTBOUND = "AllowOnlyApprovedOutbound" + + +class JobInputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the Job Input Type.""" + + LITERAL = "literal" + URI_FILE = "uri_file" + URI_FOLDER = "uri_folder" + MLTABLE = "mltable" + CUSTOM_MODEL = "custom_model" + MLFLOW_MODEL = "mlflow_model" + TRITON_MODEL = "triton_model" + + +class JobLimitsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """JobLimitsType.""" + + COMMAND = "Command" + SWEEP = "Sweep" + + +class JobOutputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the Job Output Type.""" + + URI_FILE = "uri_file" + URI_FOLDER = "uri_folder" + MLTABLE = "mltable" + CUSTOM_MODEL = "custom_model" + MLFLOW_MODEL = "mlflow_model" + TRITON_MODEL = "triton_model" + + +class JobProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the job provisioning state.""" + + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + IN_PROGRESS = "InProgress" + + +class JobStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The status of a job.""" + + NOT_STARTED = "NotStarted" + """Run hasn't started yet.""" + STARTING = "Starting" + """Run has started. The user has a run ID.""" + PROVISIONING = "Provisioning" + """(Not used currently) It will be used if ES is creating the compute target.""" + PREPARING = "Preparing" + """The run environment is being prepared.""" + QUEUED = "Queued" + """The job is queued in the compute target. For example, in BatchAI the job is in queued state, + #: while waiting for all required nodes to be ready.""" + RUNNING = "Running" + """The job started to run in the compute target.""" + FINALIZING = "Finalizing" + """Job is completed in the target. It is in output collection state now.""" + CANCEL_REQUESTED = "CancelRequested" + """Cancellation has been requested for the job.""" + COMPLETED = "Completed" + """Job completed successfully. This reflects that both the job itself and output collection states + #: completed successfully""" + FAILED = "Failed" + """Job failed.""" + CANCELED = "Canceled" + """Following cancellation request, the job is now successfully canceled.""" + NOT_RESPONDING = "NotResponding" + """When heartbeat is enabled, if the run isn't updating any information to RunHistory then the run + #: goes to NotResponding state. + #: NotResponding is the only state that is exempt from strict transition orders. A run can go from + #: NotResponding to any of the previous states.""" + PAUSED = "Paused" + """The job is paused by users. Some adjustment to labeling jobs can be made only in paused state.""" + UNKNOWN = "Unknown" + """Default job status if not mapped to all other statuses""" + SCHEDULED = "Scheduled" + """The job is in a scheduled state. 
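# Sketch: a small helper for checking whether a JobStatus value reported by the
# service is terminal. Raw strings compare directly against the str-based enum
# members, so no explicit conversion is needed. Assumes the regenerated models
# package is importable; the helper itself is illustrative, not part of the SDK.
from azure.ai.ml._restclient.v2023_08_01_preview.models import JobStatus

def is_terminal(status: str) -> bool:
    return status in (JobStatus.COMPLETED, JobStatus.FAILED, JobStatus.CANCELED)

assert is_terminal("Completed") and not is_terminal("Running")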
Job is not in any active state.""" + + +class JobTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the job tier.""" + + NULL = "Null" + SPOT = "Spot" + BASIC = "Basic" + STANDARD = "Standard" + PREMIUM = "Premium" + + +class JobType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the type of job.""" + + AUTO_ML = "AutoML" + COMMAND = "Command" + LABELING = "Labeling" + SWEEP = "Sweep" + PIPELINE = "Pipeline" + SPARK = "Spark" + + +class KeyType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """KeyType.""" + + PRIMARY = "Primary" + SECONDARY = "Secondary" + + +class LearningRateScheduler(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Learning rate scheduler enum.""" + + NONE = "None" + """No learning rate scheduler selected.""" + WARMUP_COSINE = "WarmupCosine" + """Cosine Annealing With Warmup.""" + STEP = "Step" + """Step learning rate scheduler.""" + + +class ListViewType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ListViewType.""" + + ACTIVE_ONLY = "ActiveOnly" + ARCHIVED_ONLY = "ArchivedOnly" + ALL = "All" + + +class LoadBalancerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Load Balancer Type.""" + + PUBLIC_IP = "PublicIp" + INTERNAL_LOAD_BALANCER = "InternalLoadBalancer" + + +class LogTrainingMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """LogTrainingMetrics.""" + + ENABLE = "Enable" + """Enable compute and log training metrics.""" + DISABLE = "Disable" + """Disable compute and log training metrics.""" + + +class LogValidationLoss(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """LogValidationLoss.""" + + ENABLE = "Enable" + """Enable compute and log validation metrics.""" + DISABLE = "Disable" + """Disable compute and log validation metrics.""" + + +class LogVerbosity(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum for setting log verbosity.""" + + NOT_SET = "NotSet" + """No logs emitted.""" + DEBUG = "Debug" + """Debug and above log statements logged.""" + INFO = "Info" + """Info and above log statements logged.""" + WARNING = "Warning" + """Warning and above log statements logged.""" + ERROR = "Error" + """Error and above log statements logged.""" + CRITICAL = "Critical" + """Only critical statements logged.""" + + +class ManagedNetworkStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Status for the managed network of a machine learning workspace.""" + + INACTIVE = "Inactive" + ACTIVE = "Active" + + +class ManagedServiceIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of managed service identity (where both SystemAssigned and UserAssigned types are + allowed). 
+ """ + + NONE = "None" + SYSTEM_ASSIGNED = "SystemAssigned" + USER_ASSIGNED = "UserAssigned" + SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" + + +class MaterializationStoreType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MaterializationStoreType.""" + + NONE = "None" + ONLINE = "Online" + OFFLINE = "Offline" + ONLINE_AND_OFFLINE = "OnlineAndOffline" + + +class MediaType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Media type of data asset.""" + + IMAGE = "Image" + TEXT = "Text" + + +class MLAssistConfigurationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MLAssistConfigurationType.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class MlflowAutologger(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Indicates whether mlflow autologger is enabled for notebooks.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class MLFlowAutologgerState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the state of mlflow autologger.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class ModelSize(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Image model size.""" + + NONE = "None" + """No value selected.""" + SMALL = "Small" + """Small size.""" + MEDIUM = "Medium" + """Medium size.""" + LARGE = "Large" + """Large size.""" + EXTRA_LARGE = "ExtraLarge" + """Extra large size.""" + + +class ModelTaskType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Model task type enum.""" + + CLASSIFICATION = "Classification" + REGRESSION = "Regression" + QUESTION_ANSWERING = "QuestionAnswering" + + +class MonitorComputeIdentityType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Monitor compute identity type enum.""" + + AML_TOKEN = "AmlToken" + """Authenticates through user's AML token.""" + MANAGED_IDENTITY = "ManagedIdentity" + """Authenticates through a user-provided managed identity.""" + + +class MonitorComputeType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Monitor compute type enum.""" + + SERVERLESS_SPARK = "ServerlessSpark" + """Serverless Spark compute.""" + + +class MonitoringAlertNotificationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MonitoringAlertNotificationType.""" + + AZURE_MONITOR = "AzureMonitor" + """Settings for Azure Monitor based alerting.""" + EMAIL = "Email" + """Settings for AML email notifications.""" + + +class MonitoringFeatureDataType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MonitoringFeatureDataType.""" + + NUMERICAL = "Numerical" + """Used for features of numerical data type.""" + CATEGORICAL = "Categorical" + """Used for features of categorical data type.""" + + +class MonitoringFeatureFilterType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MonitoringFeatureFilterType.""" + + ALL_FEATURES = "AllFeatures" + """Includes all features.""" + TOP_N_BY_ATTRIBUTION = "TopNByAttribution" + """Only includes the top contributing features, measured by feature attribution.""" + FEATURE_SUBSET = "FeatureSubset" + """Includes a user-defined subset of features.""" + + +class MonitoringInputDataType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Monitoring input data type enum.""" + + STATIC = "Static" + """An input data with a fixed window size.""" + TRAILING = "Trailing" + """An input data which trailing relatively to the monitor's current run.""" + FIXED = "Fixed" + """An input data with tabular format which doesn't require preprocessing.""" + + +class MonitoringModelType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MonitoringModelType.""" + + 
CLASSIFICATION = "Classification" + """A model trained for classification tasks.""" + REGRESSION = "Regression" + """A model trained for regressions tasks.""" + + +class MonitoringNotificationMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MonitoringNotificationMode.""" + + DISABLED = "Disabled" + """Disabled notifications will not produce emails/metrics leveraged for alerting.""" + ENABLED = "Enabled" + """Enabled notification will produce emails/metrics leveraged for alerting.""" + + +class MonitoringSignalType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """MonitoringSignalType.""" + + DATA_DRIFT = "DataDrift" + """Tracks model input data distribution change, comparing against training data or past production + #: data.""" + PREDICTION_DRIFT = "PredictionDrift" + """Tracks prediction result data distribution change, comparing against validation/test label data + #: or past production data.""" + DATA_QUALITY = "DataQuality" + """Tracks model input data integrity.""" + FEATURE_ATTRIBUTION_DRIFT = "FeatureAttributionDrift" + """Tracks feature importance change in production, comparing against feature importance at + #: training time.""" + CUSTOM = "Custom" + """Tracks a custom signal provided by users.""" + MODEL_PERFORMANCE = "ModelPerformance" + """Tracks model performance based on ground truth data.""" + GENERATION_SAFETY_QUALITY = "GenerationSafetyQuality" + """Tracks the safety and quality of generated content.""" + GENERATION_TOKEN_STATISTICS = "GenerationTokenStatistics" + """Tracks the token usage of generative endpoints.""" + + +class MountAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Mount Action.""" + + MOUNT = "Mount" + UNMOUNT = "Unmount" + + +class MountState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Mount state.""" + + MOUNT_REQUESTED = "MountRequested" + MOUNTED = "Mounted" + MOUNT_FAILED = "MountFailed" + UNMOUNT_REQUESTED = "UnmountRequested" + UNMOUNT_FAILED = "UnmountFailed" + UNMOUNTED = "Unmounted" + + +class MultiSelect(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Whether multiSelect is enabled.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class NCrossValidationsMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Determines how N-Cross validations value is determined.""" + + AUTO = "Auto" + """Determine N-Cross validations value automatically. Supported only for 'Forecasting' AutoML + #: task.""" + CUSTOM = "Custom" + """Use custom N-Cross validations value.""" + + +class Network(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """network of this container.""" + + BRIDGE = "Bridge" + HOST = "Host" + + +class NlpLearningRateScheduler(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum of learning rate schedulers that aligns with those supported by HF.""" + + NONE = "None" + """No learning rate schedule.""" + LINEAR = "Linear" + """Linear warmup and decay.""" + COSINE = "Cosine" + """Linear warmup then cosine decay.""" + COSINE_WITH_RESTARTS = "CosineWithRestarts" + """Linear warmup, cosine decay, then restart to initial LR.""" + POLYNOMIAL = "Polynomial" + """Increase linearly then polynomially decay.""" + CONSTANT = "Constant" + """Constant learning rate.""" + CONSTANT_WITH_WARMUP = "ConstantWithWarmup" + """Linear warmup followed by constant value.""" + + +class NodeState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the compute node. Values are idle, running, preparing, unusable, leaving and + preempted. 
+ """ + + IDLE = "idle" + RUNNING = "running" + PREPARING = "preparing" + UNUSABLE = "unusable" + LEAVING = "leaving" + PREEMPTED = "preempted" + + +class NodesValueType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The enumerated types for the nodes value.""" + + ALL = "All" + CUSTOM = "Custom" + + +class NumericalDataDriftMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """NumericalDataDriftMetric.""" + + JENSEN_SHANNON_DISTANCE = "JensenShannonDistance" + """The Jensen Shannon Distance (JSD) metric.""" + POPULATION_STABILITY_INDEX = "PopulationStabilityIndex" + """The Population Stability Index (PSI) metric.""" + NORMALIZED_WASSERSTEIN_DISTANCE = "NormalizedWassersteinDistance" + """The Normalized Wasserstein Distance metric.""" + TWO_SAMPLE_KOLMOGOROV_SMIRNOV_TEST = "TwoSampleKolmogorovSmirnovTest" + """The Two Sample Kolmogorov-Smirnov Test (two-sample K–S) metric.""" + + +class NumericalDataQualityMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """NumericalDataQualityMetric.""" + + NULL_VALUE_RATE = "NullValueRate" + """Calculates the rate of null values.""" + DATA_TYPE_ERROR_RATE = "DataTypeErrorRate" + """Calculates the rate of data type errors.""" + OUT_OF_BOUNDS_RATE = "OutOfBoundsRate" + """Calculates the rate values are out of bounds.""" + + +class NumericalPredictionDriftMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """NumericalPredictionDriftMetric.""" + + JENSEN_SHANNON_DISTANCE = "JensenShannonDistance" + """The Jensen Shannon Distance (JSD) metric.""" + POPULATION_STABILITY_INDEX = "PopulationStabilityIndex" + """The Population Stability Index (PSI) metric.""" + NORMALIZED_WASSERSTEIN_DISTANCE = "NormalizedWassersteinDistance" + """The Normalized Wasserstein Distance metric.""" + TWO_SAMPLE_KOLMOGOROV_SMIRNOV_TEST = "TwoSampleKolmogorovSmirnovTest" + """The Two Sample Kolmogorov-Smirnov Test (two-sample K–S) metric.""" + + +class ObjectDetectionPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for Image ObjectDetection task.""" + + MEAN_AVERAGE_PRECISION = "MeanAveragePrecision" + """Mean Average Precision (MAP) is the average of AP (Average Precision). 
+ #: AP is calculated for each class and averaged to get the MAP.""" + + +class OneLakeArtifactType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine OneLake artifact type.""" + + LAKE_HOUSE = "LakeHouse" + + +class OperatingSystemType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of operating system.""" + + LINUX = "Linux" + WINDOWS = "Windows" + + +class OperationName(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Name of the last operation.""" + + CREATE = "Create" + START = "Start" + STOP = "Stop" + RESTART = "Restart" + RESIZE = "Resize" + REIMAGE = "Reimage" + DELETE = "Delete" + + +class OperationStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Operation status.""" + + IN_PROGRESS = "InProgress" + SUCCEEDED = "Succeeded" + CREATE_FAILED = "CreateFailed" + START_FAILED = "StartFailed" + STOP_FAILED = "StopFailed" + RESTART_FAILED = "RestartFailed" + RESIZE_FAILED = "ResizeFailed" + REIMAGE_FAILED = "ReimageFailed" + DELETE_FAILED = "DeleteFailed" + + +class OperationTrigger(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Trigger of operation.""" + + USER = "User" + SCHEDULE = "Schedule" + IDLE_SHUTDOWN = "IdleShutdown" + + +class OrderString(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """OrderString.""" + + CREATED_AT_DESC = "CreatedAtDesc" + CREATED_AT_ASC = "CreatedAtAsc" + UPDATED_AT_DESC = "UpdatedAtDesc" + UPDATED_AT_ASC = "UpdatedAtAsc" + + +class Origin(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The intended executor of the operation; as in Resource Based Access Control (RBAC) and audit + logs UX. Default value is "user,system". + """ + + USER = "user" + SYSTEM = "system" + USER_SYSTEM = "user,system" + + +class OsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Compute OS Type.""" + + LINUX = "Linux" + WINDOWS = "Windows" + + +class OutputDeliveryMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Output data delivery mode enums.""" + + READ_WRITE_MOUNT = "ReadWriteMount" + UPLOAD = "Upload" + DIRECT = "Direct" + + +class PackageBuildState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Package build state returned in package response.""" + + NOT_STARTED = "NotStarted" + RUNNING = "Running" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + + +class PackageInputDeliveryMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Mounting type of the model or the inputs.""" + + COPY = "Copy" + DOWNLOAD = "Download" + + +class PackageInputType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of the inputs.""" + + URI_FILE = "UriFile" + URI_FOLDER = "UriFolder" + + +class PatchStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The os patching status.""" + + COMPLETED_WITH_WARNINGS = "CompletedWithWarnings" + FAILED = "Failed" + IN_PROGRESS = "InProgress" + SUCCEEDED = "Succeeded" + UNKNOWN = "Unknown" + + +class PendingUploadCredentialType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the PendingUpload credentials type.""" + + SAS = "SAS" + + +class PendingUploadType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of storage to use for the pending upload location.""" + + NONE = "None" + TEMPORARY_BLOB_REFERENCE = "TemporaryBlobReference" + + +class PrivateEndpointConnectionProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The current provisioning state.""" + + SUCCEEDED = "Succeeded" + CREATING = "Creating" + DELETING = "Deleting" + FAILED = "Failed" + + +class ProtectionLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + 
"""Protection level associated with the Intellectual Property.""" + + ALL = "All" + """All means Intellectual Property is fully protected.""" + NONE = "None" + """None means it is not an Intellectual Property.""" + + +class Protocol(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Protocol over which communication will happen over this endpoint.""" + + TCP = "tcp" + UDP = "udp" + HTTP = "http" + + +class ProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The provision state of the cluster. Valid values are Unknown, Updating, Provisioning, + Succeeded, and Failed. + """ + + UNKNOWN = "Unknown" + UPDATING = "Updating" + CREATING = "Creating" + DELETING = "Deleting" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + + +class ProvisioningStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The current deployment state of schedule.""" + + COMPLETED = "Completed" + PROVISIONING = "Provisioning" + FAILED = "Failed" + + +class PublicNetworkAccessType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine whether PublicNetworkAccess is Enabled or Disabled.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class QuotaUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """An enum describing the unit of quota measurement.""" + + COUNT = "Count" + + +class RandomSamplingAlgorithmRule(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The specific type of random algorithm.""" + + RANDOM = "Random" + SOBOL = "Sobol" + + +class RecurrenceFrequency(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to describe the frequency of a recurrence schedule.""" + + MINUTE = "Minute" + """Minute frequency""" + HOUR = "Hour" + """Hour frequency""" + DAY = "Day" + """Day frequency""" + WEEK = "Week" + """Week frequency""" + MONTH = "Month" + """Month frequency""" + + +class ReferenceType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine which reference method to use for an asset.""" + + ID = "Id" + DATA_PATH = "DataPath" + OUTPUT_PATH = "OutputPath" + + +class RegressionModelPerformanceMetric(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """RegressionModelPerformanceMetric.""" + + MEAN_ABSOLUTE_ERROR = "MeanAbsoluteError" + """The Mean Absolute Error (MAE) metric.""" + ROOT_MEAN_SQUARED_ERROR = "RootMeanSquaredError" + """The Root Mean Squared Error (RMSE) metric.""" + MEAN_SQUARED_ERROR = "MeanSquaredError" + """The Mean Squared Error (MSE) metric.""" + + +class RegressionModels(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum for all Regression models supported by AutoML.""" + + ELASTIC_NET = "ElasticNet" + """Elastic net is a popular type of regularized linear regression that combines two popular + #: penalties, specifically the L1 and L2 penalty functions.""" + GRADIENT_BOOSTING = "GradientBoosting" + """The technique of transiting week learners into a strong learner is called Boosting. The + #: gradient boosting algorithm process works on this theory of execution.""" + DECISION_TREE = "DecisionTree" + """Decision Trees are a non-parametric supervised learning method used for both classification and + #: regression tasks. 
+ #: The goal is to create a model that predicts the value of a target variable by learning simple + #: decision rules inferred from the data features.""" + KNN = "KNN" + """K-nearest neighbors (KNN) algorithm uses 'feature similarity' to predict the values of new + #: datapoints + #: which further means that the new data point will be assigned a value based on how closely it + #: matches the points in the training set.""" + LASSO_LARS = "LassoLars" + """Lasso model fit with Least Angle Regression a.k.a. Lars. It is a Linear Model trained with an + #: L1 prior as regularizer.""" + SGD = "SGD" + """SGD: Stochastic gradient descent is an optimization algorithm often used in machine learning + #: applications + #: to find the model parameters that correspond to the best fit between predicted and actual + #: outputs. + #: It's an inexact but powerful technique.""" + RANDOM_FOREST = "RandomForest" + """Random forest is a supervised learning algorithm. + #: The "forest" it builds, is an ensemble of decision trees, usually trained with the bagging + #: method. + #: The general idea of the bagging method is that a combination of learning models increases the + #: overall result.""" + EXTREME_RANDOM_TREES = "ExtremeRandomTrees" + """Extreme Trees is an ensemble machine learning algorithm that combines the predictions from many + #: decision trees. It is related to the widely used random forest algorithm.""" + LIGHT_GBM = "LightGBM" + """LightGBM is a gradient boosting framework that uses tree based learning algorithms.""" + XG_BOOST_REGRESSOR = "XGBoostRegressor" + """XGBoostRegressor: Extreme Gradient Boosting Regressor is a supervised machine learning model + #: using ensemble of base learners.""" + + +class RegressionPrimaryMetrics(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Primary metrics for Regression task.""" + + SPEARMAN_CORRELATION = "SpearmanCorrelation" + """The Spearman's rank coefficient of correlation is a nonparametric measure of rank correlation.""" + NORMALIZED_ROOT_MEAN_SQUARED_ERROR = "NormalizedRootMeanSquaredError" + """The Normalized Root Mean Squared Error (NRMSE) the RMSE facilitates the comparison between + #: models with different scales.""" + R2_SCORE = "R2Score" + """The R2 score is one of the performance evaluation measures for forecasting-based machine + #: learning models.""" + NORMALIZED_MEAN_ABSOLUTE_ERROR = "NormalizedMeanAbsoluteError" + """The Normalized Mean Absolute Error (NMAE) is a validation metric to compare the Mean Absolute + #: Error (MAE) of (time) series with different scales.""" + + +class RemoteLoginPortPublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh + port is closed on all nodes of the cluster. Enabled - Indicates that the public ssh port is + open on all nodes of the cluster. NotSpecified - Indicates that the public ssh port is closed + on all nodes of the cluster if VNet is defined, else is open all public nodes. It can be + default only during cluster creation time, after creation it will be either enabled or + disabled. 
+ """ + + ENABLED = "Enabled" + DISABLED = "Disabled" + NOT_SPECIFIED = "NotSpecified" + + +class RollingRateType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """RollingRateType.""" + + YEAR = "Year" + MONTH = "Month" + DAY = "Day" + HOUR = "Hour" + MINUTE = "Minute" + + +class RuleAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The action enum for networking rule.""" + + ALLOW = "Allow" + DENY = "Deny" + + +class RuleCategory(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Category of a managed network Outbound Rule of a machine learning workspace.""" + + REQUIRED = "Required" + RECOMMENDED = "Recommended" + USER_DEFINED = "UserDefined" + + +class RuleStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of a managed network Outbound Rule of a machine learning workspace.""" + + INACTIVE = "Inactive" + ACTIVE = "Active" + + +class RuleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of a managed network Outbound Rule of a machine learning workspace.""" + + FQDN = "FQDN" + PRIVATE_ENDPOINT = "PrivateEndpoint" + SERVICE_TAG = "ServiceTag" + + +class SamplingAlgorithmType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """SamplingAlgorithmType.""" + + GRID = "Grid" + RANDOM = "Random" + BAYESIAN = "Bayesian" + + +class ScaleType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ScaleType.""" + + DEFAULT = "Default" + TARGET_UTILIZATION = "TargetUtilization" + + +class ScheduleActionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ScheduleActionType.""" + + CREATE_JOB = "CreateJob" + INVOKE_BATCH_ENDPOINT = "InvokeBatchEndpoint" + IMPORT_DATA = "ImportData" + CREATE_MONITOR = "CreateMonitor" + + +class ScheduleListViewType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ScheduleListViewType.""" + + ENABLED_ONLY = "EnabledOnly" + DISABLED_ONLY = "DisabledOnly" + ALL = "All" + + +class ScheduleProvisioningState(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The current deployment state of schedule.""" + + COMPLETED = "Completed" + PROVISIONING = "Provisioning" + FAILED = "Failed" + + +class ScheduleProvisioningStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ScheduleProvisioningStatus.""" + + CREATING = "Creating" + UPDATING = "Updating" + DELETING = "Deleting" + SUCCEEDED = "Succeeded" + FAILED = "Failed" + CANCELED = "Canceled" + + +class ScheduleStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Is the schedule enabled or disabled?.""" + + ENABLED = "Enabled" + DISABLED = "Disabled" + + +class SeasonalityMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Forecasting seasonality mode.""" + + AUTO = "Auto" + """Seasonality to be determined automatically.""" + CUSTOM = "Custom" + """Use the custom seasonality value.""" + + +class SecretsType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the datastore secrets type.""" + + ACCOUNT_KEY = "AccountKey" + CERTIFICATE = "Certificate" + SAS = "Sas" + SERVICE_PRINCIPAL = "ServicePrincipal" + KERBEROS_PASSWORD = "KerberosPassword" + KERBEROS_KEYTAB = "KerberosKeytab" + + +class ServiceDataAccessAuthIdentity(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """ServiceDataAccessAuthIdentity.""" + + NONE = "None" + """Do not use any identity for service data access.""" + WORKSPACE_SYSTEM_ASSIGNED_IDENTITY = "WorkspaceSystemAssignedIdentity" + """Use the system assigned managed identity of the Workspace to authenticate service data access.""" + WORKSPACE_USER_ASSIGNED_IDENTITY = "WorkspaceUserAssignedIdentity" + """Use the user assigned managed 
identity of the Workspace to authenticate service data access."""
+
+
+class ShortSeriesHandlingConfiguration(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The parameter defining how AutoML should handle short time series."""
+
+    NONE = "None"
+    """Represents no/null value."""
+    AUTO = "Auto"
+    """Short series will be padded if there are no long series, otherwise short series will be
+    #: dropped."""
+    PAD = "Pad"
+    """All the short series will be padded."""
+    DROP = "Drop"
+    """All the short series will be dropped."""
+
+
+class SkuScaleType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Node scaling setting for the compute sku."""
+
+    AUTOMATIC = "Automatic"
+    """Automatically scales node count."""
+    MANUAL = "Manual"
+    """Node count scaled upon user request."""
+    NONE = "None"
+    """Fixed set of nodes."""
+
+
+class SkuTier(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """This field is required to be implemented by the Resource Provider if the service has more than
+    one tier, but is not required on a PUT.
+    """
+
+    FREE = "Free"
+    BASIC = "Basic"
+    STANDARD = "Standard"
+    PREMIUM = "Premium"
+
+
+class SourceType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Data source type."""
+
+    DATASET = "Dataset"
+    DATASTORE = "Datastore"
+    URI = "URI"
+
+
+class SparkJobEntryType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """SparkJobEntryType."""
+
+    SPARK_JOB_PYTHON_ENTRY = "SparkJobPythonEntry"
+    SPARK_JOB_SCALA_ENTRY = "SparkJobScalaEntry"
+
+
+class SshPublicAccess(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh
+    port is closed on this instance. Enabled - Indicates that the public ssh port is open and
+    accessible according to the VNet/subnet policy if applicable.
+    """
+
+    ENABLED = "Enabled"
+    DISABLED = "Disabled"
+
+
+class SslConfigStatus(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """Enable or disable ssl for scoring."""
+
+    DISABLED = "Disabled"
+    ENABLED = "Enabled"
+    AUTO = "Auto"
+
+
+class StackMetaLearnerType(str, Enum, metaclass=CaseInsensitiveEnumMeta):
+    """The meta-learner is a model trained on the output of the individual heterogeneous models.
+    Default meta-learners are LogisticRegression for classification tasks (or LogisticRegressionCV
+    if cross-validation is enabled) and ElasticNet for regression/forecasting tasks (or
+    ElasticNetCV if cross-validation is enabled).
+    This parameter can be one of the following strings: LogisticRegression, LogisticRegressionCV,
+    LightGBMClassifier, ElasticNet, ElasticNetCV, LightGBMRegressor, or LinearRegression.
+ """ + + NONE = "None" + LOGISTIC_REGRESSION = "LogisticRegression" + """Default meta-learners are LogisticRegression for classification tasks.""" + LOGISTIC_REGRESSION_CV = "LogisticRegressionCV" + """Default meta-learners are LogisticRegression for classification task when CV is on.""" + LIGHT_GBM_CLASSIFIER = "LightGBMClassifier" + ELASTIC_NET = "ElasticNet" + """Default meta-learners are LogisticRegression for regression task.""" + ELASTIC_NET_CV = "ElasticNetCV" + """Default meta-learners are LogisticRegression for regression task when CV is on.""" + LIGHT_GBM_REGRESSOR = "LightGBMRegressor" + LINEAR_REGRESSION = "LinearRegression" + + +class Status(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Status of update workspace quota.""" + + UNDEFINED = "Undefined" + SUCCESS = "Success" + FAILURE = "Failure" + INVALID_QUOTA_BELOW_CLUSTER_MINIMUM = "InvalidQuotaBelowClusterMinimum" + INVALID_QUOTA_EXCEEDS_SUBSCRIPTION_LIMIT = "InvalidQuotaExceedsSubscriptionLimit" + INVALID_VM_FAMILY_NAME = "InvalidVMFamilyName" + OPERATION_NOT_SUPPORTED_FOR_SKU = "OperationNotSupportedForSku" + OPERATION_NOT_ENABLED_FOR_REGION = "OperationNotEnabledForRegion" + + +class StatusMessageLevel(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """StatusMessageLevel.""" + + ERROR = "Error" + INFORMATION = "Information" + WARNING = "Warning" + + +class StochasticOptimizer(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Stochastic optimizer for image models.""" + + NONE = "None" + """No optimizer selected.""" + SGD = "Sgd" + """Stochastic Gradient Descent optimizer.""" + ADAM = "Adam" + """Adam is algorithm the optimizes stochastic objective functions based on adaptive estimates of + #: moments""" + ADAMW = "Adamw" + """AdamW is a variant of the optimizer Adam that has an improved implementation of weight decay.""" + + +class StorageAccountType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """type of this storage account.""" + + STANDARD_LRS = "Standard_LRS" + PREMIUM_LRS = "Premium_LRS" + + +class TargetAggregationFunction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Target aggregate function.""" + + NONE = "None" + """Represent no value set.""" + SUM = "Sum" + MAX = "Max" + MIN = "Min" + MEAN = "Mean" + + +class TargetLagsMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Target lags selection modes.""" + + AUTO = "Auto" + """Target lags to be determined automatically.""" + CUSTOM = "Custom" + """Use the custom target lags.""" + + +class TargetRollingWindowSizeMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Target rolling windows size mode.""" + + AUTO = "Auto" + """Determine rolling windows size automatically.""" + CUSTOM = "Custom" + """Use the specified rolling window size.""" + + +class TaskType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """AutoMLJob Task type.""" + + CLASSIFICATION = "Classification" + """Classification in machine learning and statistics is a supervised learning approach in which + #: the computer program learns from the data given to it and make new observations or + #: classifications.""" + REGRESSION = "Regression" + """Regression means to predict the value using the input data. Regression models are used to + #: predict a continuous value.""" + FORECASTING = "Forecasting" + """Forecasting is a special kind of regression task that deals with time-series data and creates + #: forecasting model + #: that can be used to predict the near future values based on the inputs.""" + IMAGE_CLASSIFICATION = "ImageClassification" + """Image Classification. 
Multi-class image classification is used when an image is classified with + #: only a single label + #: from a set of classes - e.g. each image is classified as either an image of a 'cat' or a 'dog' + #: or a 'duck'.""" + IMAGE_CLASSIFICATION_MULTILABEL = "ImageClassificationMultilabel" + """Image Classification Multilabel. Multi-label image classification is used when an image could + #: have one or more labels + #: from a set of labels - e.g. an image could be labeled with both 'cat' and 'dog'.""" + IMAGE_OBJECT_DETECTION = "ImageObjectDetection" + """Image Object Detection. Object detection is used to identify objects in an image and locate + #: each object with a + #: bounding box e.g. locate all dogs and cats in an image and draw a bounding box around each.""" + IMAGE_INSTANCE_SEGMENTATION = "ImageInstanceSegmentation" + """Image Instance Segmentation. Instance segmentation is used to identify objects in an image at + #: the pixel level, + #: drawing a polygon around each object in the image.""" + TEXT_CLASSIFICATION = "TextClassification" + """Text classification (also known as text tagging or text categorization) is the process of + #: sorting texts into categories. + #: Categories are mutually exclusive.""" + TEXT_CLASSIFICATION_MULTILABEL = "TextClassificationMultilabel" + """Multilabel classification task assigns each sample to a group (zero or more) of target labels.""" + TEXT_NER = "TextNER" + """Text Named Entity Recognition a.k.a. TextNER. + #: Named Entity Recognition (NER) is the ability to take free-form text and identify the + #: occurrences of entities such as people, locations, organizations, and more.""" + + +class TextAnnotationType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Annotation type of text data.""" + + CLASSIFICATION = "Classification" + NAMED_ENTITY_RECOGNITION = "NamedEntityRecognition" + + +class TrainingMode(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Training mode dictates whether to use distributed training or not.""" + + AUTO = "Auto" + """Auto mode""" + DISTRIBUTED = "Distributed" + """Distributed training mode""" + NON_DISTRIBUTED = "NonDistributed" + """Non distributed training mode""" + + +class TriggerType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """TriggerType.""" + + RECURRENCE = "Recurrence" + CRON = "Cron" + + +class UnderlyingResourceAction(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """UnderlyingResourceAction.""" + + DELETE = "Delete" + DETACH = "Detach" + + +class UnitOfMeasure(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The unit of time measurement for the specified VM price. 
Example: OneHour.""" + + ONE_HOUR = "OneHour" + + +class UsageUnit(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """An enum describing the unit of usage measurement.""" + + COUNT = "Count" + + +class UseStl(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Configure STL Decomposition of the time-series target column.""" + + NONE = "None" + """No stl decomposition.""" + SEASON = "Season" + SEASON_TREND = "SeasonTrend" + + +class ValidationMetricType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Metric computation method to use for validation metrics in image tasks.""" + + NONE = "None" + """No metric.""" + COCO = "Coco" + """Coco metric.""" + VOC = "Voc" + """Voc metric.""" + COCO_VOC = "CocoVoc" + """CocoVoc metric.""" + + +class VMPriceOSType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Operating system type used by the VM.""" + + LINUX = "Linux" + WINDOWS = "Windows" + + +class VmPriority(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Virtual Machine priority.""" + + DEDICATED = "Dedicated" + LOW_PRIORITY = "LowPriority" + + +class VMTier(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """The type of the VM.""" + + STANDARD = "Standard" + LOW_PRIORITY = "LowPriority" + SPOT = "Spot" + + +class VolumeDefinitionType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe.""" + + BIND = "bind" + VOLUME = "volume" + TMPFS = "tmpfs" + NPIPE = "npipe" + + +class WebhookType(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum to determine the webhook callback service type.""" + + AZURE_DEV_OPS = "AzureDevOps" + + +class WeekDay(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """Enum of weekday.""" + + MONDAY = "Monday" + """Monday weekday""" + TUESDAY = "Tuesday" + """Tuesday weekday""" + WEDNESDAY = "Wednesday" + """Wednesday weekday""" + THURSDAY = "Thursday" + """Thursday weekday""" + FRIDAY = "Friday" + """Friday weekday""" + SATURDAY = "Saturday" + """Saturday weekday""" + SUNDAY = "Sunday" + """Sunday weekday""" diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_azure_machine_learning_workspaces_enums.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_azure_machine_learning_workspaces_enums.py deleted file mode 100644 index 8fc467a97482..000000000000 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_azure_machine_learning_workspaces_enums.py +++ /dev/null @@ -1,1955 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from enum import Enum -from six import with_metaclass -from azure.core import CaseInsensitiveEnumMeta - - -class AllocationState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Allocation state of the compute. Possible values are: steady - Indicates that the compute is - not resizing. There are no changes to the number of compute nodes in the compute in progress. A - compute enters this state when it is created and when no operations are being performed on the - compute to change the number of compute nodes. 
resizing - Indicates that the compute is - resizing; that is, compute nodes are being added to or removed from the compute. - """ - - STEADY = "Steady" - RESIZING = "Resizing" - -class ApplicationSharingPolicy(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Policy for sharing applications on this compute instance among users of parent workspace. If - Personal, only the creator can access applications on this compute instance. When Shared, any - workspace user can access applications on this instance depending on his/her assigned role. - """ - - PERSONAL = "Personal" - SHARED = "Shared" - -class AssetProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Provisioning state of registry asset. - """ - - SUCCEEDED = "Succeeded" - FAILED = "Failed" - CANCELED = "Canceled" - CREATING = "Creating" - UPDATING = "Updating" - DELETING = "Deleting" - -class AutoDeleteCondition(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - CREATED_GREATER_THAN = "CreatedGreaterThan" - LAST_ACCESSED_GREATER_THAN = "LastAccessedGreaterThan" - -class AutoRebuildSetting(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """AutoRebuild setting for the derived image - """ - - DISABLED = "Disabled" - ON_BASE_IMAGE_UPDATE = "OnBaseImageUpdate" - -class Autosave(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Auto save settings. - """ - - NONE = "None" - LOCAL = "Local" - REMOTE = "Remote" - -class BaseEnvironmentSourceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Base environment type. - """ - - ENVIRONMENT_ASSET = "EnvironmentAsset" - -class BatchDeploymentConfigurationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The enumerated property types for batch deployments. - """ - - MODEL = "Model" - PIPELINE_COMPONENT = "PipelineComponent" - -class BatchLoggingLevel(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Log verbosity for batch inferencing. - Increasing verbosity order for logging is : Warning, Info and Debug. - The default value is Info. - """ - - INFO = "Info" - WARNING = "Warning" - DEBUG = "Debug" - -class BatchOutputAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine how batch inferencing will handle output - """ - - SUMMARY_ONLY = "SummaryOnly" - APPEND_ROW = "AppendRow" - -class BillingCurrency(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Three lettered code specifying the currency of the VM price. Example: USD - """ - - USD = "USD" - -class BlockedTransformers(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum for all classification models supported by AutoML. - """ - - #: Target encoding for text data. - TEXT_TARGET_ENCODER = "TextTargetEncoder" - #: Ohe hot encoding creates a binary feature transformation. - ONE_HOT_ENCODER = "OneHotEncoder" - #: Target encoding for categorical data. - CAT_TARGET_ENCODER = "CatTargetEncoder" - #: Tf-Idf stands for, term-frequency times inverse document-frequency. This is a common term - #: weighting scheme for identifying information from documents. - TF_IDF = "TfIdf" - #: Weight of Evidence encoding is a technique used to encode categorical variables. It uses the - #: natural log of the P(1)/P(0) to create weights. - WO_E_TARGET_ENCODER = "WoETargetEncoder" - #: Label encoder converts labels/categorical variables in a numerical form. - LABEL_ENCODER = "LabelEncoder" - #: Word embedding helps represents words or phrases as a vector, or a series of numbers. 
- WORD_EMBEDDING = "WordEmbedding" - #: Naive Bayes is a classified that is used for classification of discrete features that are - #: categorically distributed. - NAIVE_BAYES = "NaiveBayes" - #: Count Vectorizer converts a collection of text documents to a matrix of token counts. - COUNT_VECTORIZER = "CountVectorizer" - #: Hashing One Hot Encoder can turn categorical variables into a limited number of new features. - #: This is often used for high-cardinality categorical features. - HASH_ONE_HOT_ENCODER = "HashOneHotEncoder" - -class Caching(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Caching type of Data Disk. - """ - - NONE = "None" - READ_ONLY = "ReadOnly" - READ_WRITE = "ReadWrite" - -class CategoricalDataDriftMetric(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: The Jensen Shannon Distance (JSD) metric. - JENSEN_SHANNON_DISTANCE = "JensenShannonDistance" - #: The Population Stability Index (PSI) metric. - POPULATION_STABILITY_INDEX = "PopulationStabilityIndex" - #: The Pearsons Chi Squared Test metric. - PEARSONS_CHI_SQUARED_TEST = "PearsonsChiSquaredTest" - -class CategoricalDataQualityMetric(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: Calculates the rate of null values. - NULL_VALUE_RATE = "NullValueRate" - #: Calculates the rate of data type errors. - DATA_TYPE_ERROR_RATE = "DataTypeErrorRate" - #: Calculates the rate values are out of bounds. - OUT_OF_BOUNDS_RATE = "OutOfBoundsRate" - -class CategoricalPredictionDriftMetric(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: The Jensen Shannon Distance (JSD) metric. - JENSEN_SHANNON_DISTANCE = "JensenShannonDistance" - #: The Population Stability Index (PSI) metric. - POPULATION_STABILITY_INDEX = "PopulationStabilityIndex" - #: The Pearsons Chi Squared Test metric. - PEARSONS_CHI_SQUARED_TEST = "PearsonsChiSquaredTest" - -class ClassificationModelPerformanceMetric(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: Calculates the accuracy of the model predictions. - ACCURACY = "Accuracy" - #: Calculates the precision of the model predictions. - PRECISION = "Precision" - #: Calculates the recall of the model predictions. - RECALL = "Recall" - -class ClassificationModels(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum for all classification models supported by AutoML. - """ - - #: Logistic regression is a fundamental classification technique. - #: It belongs to the group of linear classifiers and is somewhat similar to polynomial and linear - #: regression. - #: Logistic regression is fast and relatively uncomplicated, and it's convenient for you to - #: interpret the results. - #: Although it's essentially a method for binary classification, it can also be applied to - #: multiclass problems. - LOGISTIC_REGRESSION = "LogisticRegression" - #: SGD: Stochastic gradient descent is an optimization algorithm often used in machine learning - #: applications - #: to find the model parameters that correspond to the best fit between predicted and actual - #: outputs. - SGD = "SGD" - #: The multinomial Naive Bayes classifier is suitable for classification with discrete features - #: (e.g., word counts for text classification). - #: The multinomial distribution normally requires integer feature counts. However, in practice, - #: fractional counts such as tf-idf may also work. - MULTINOMIAL_NAIVE_BAYES = "MultinomialNaiveBayes" - #: Naive Bayes classifier for multivariate Bernoulli models. 
- BERNOULLI_NAIVE_BAYES = "BernoulliNaiveBayes" - #: A support vector machine (SVM) is a supervised machine learning model that uses classification - #: algorithms for two-group classification problems. - #: After giving an SVM model sets of labeled training data for each category, they're able to - #: categorize new text. - SVM = "SVM" - #: A support vector machine (SVM) is a supervised machine learning model that uses classification - #: algorithms for two-group classification problems. - #: After giving an SVM model sets of labeled training data for each category, they're able to - #: categorize new text. - #: Linear SVM performs best when input data is linear, i.e., data can be easily classified by - #: drawing the straight line between classified values on a plotted graph. - LINEAR_SVM = "LinearSVM" - #: K-nearest neighbors (KNN) algorithm uses 'feature similarity' to predict the values of new - #: datapoints - #: which further means that the new data point will be assigned a value based on how closely it - #: matches the points in the training set. - KNN = "KNN" - #: Decision Trees are a non-parametric supervised learning method used for both classification and - #: regression tasks. - #: The goal is to create a model that predicts the value of a target variable by learning simple - #: decision rules inferred from the data features. - DECISION_TREE = "DecisionTree" - #: Random forest is a supervised learning algorithm. - #: The "forest" it builds, is an ensemble of decision trees, usually trained with the bagging - #: method. - #: The general idea of the bagging method is that a combination of learning models increases the - #: overall result. - RANDOM_FOREST = "RandomForest" - #: Extreme Trees is an ensemble machine learning algorithm that combines the predictions from many - #: decision trees. It is related to the widely used random forest algorithm. - EXTREME_RANDOM_TREES = "ExtremeRandomTrees" - #: LightGBM is a gradient boosting framework that uses tree based learning algorithms. - LIGHT_GBM = "LightGBM" - #: The technique of transiting week learners into a strong learner is called Boosting. The - #: gradient boosting algorithm process works on this theory of execution. - GRADIENT_BOOSTING = "GradientBoosting" - #: XGBoost: Extreme Gradient Boosting Algorithm. This algorithm is used for structured data where - #: target column values can be divided into distinct class values. - XG_BOOST_CLASSIFIER = "XGBoostClassifier" - -class ClassificationMultilabelPrimaryMetrics(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Primary metrics for classification multilabel tasks. - """ - - #: AUC is the Area under the curve. - #: This metric represents arithmetic mean of the score for each class, - #: weighted by the number of true instances in each class. - AUC_WEIGHTED = "AUCWeighted" - #: Accuracy is the ratio of predictions that exactly match the true class labels. - ACCURACY = "Accuracy" - #: Normalized macro recall is recall macro-averaged and normalized, so that random - #: performance has a score of 0, and perfect performance has a score of 1. - NORM_MACRO_RECALL = "NormMacroRecall" - #: The arithmetic mean of the average precision score for each class, weighted by - #: the number of true instances in each class. - AVERAGE_PRECISION_SCORE_WEIGHTED = "AveragePrecisionScoreWeighted" - #: The arithmetic mean of precision for each class, weighted by number of true instances in each - #: class. - PRECISION_SCORE_WEIGHTED = "PrecisionScoreWeighted" - #: Intersection Over Union. 
Intersection of predictions divided by union of predictions. - IOU = "IOU" - -class ClassificationPrimaryMetrics(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Primary metrics for classification tasks. - """ - - #: AUC is the Area under the curve. - #: This metric represents arithmetic mean of the score for each class, - #: weighted by the number of true instances in each class. - AUC_WEIGHTED = "AUCWeighted" - #: Accuracy is the ratio of predictions that exactly match the true class labels. - ACCURACY = "Accuracy" - #: Normalized macro recall is recall macro-averaged and normalized, so that random - #: performance has a score of 0, and perfect performance has a score of 1. - NORM_MACRO_RECALL = "NormMacroRecall" - #: The arithmetic mean of the average precision score for each class, weighted by - #: the number of true instances in each class. - AVERAGE_PRECISION_SCORE_WEIGHTED = "AveragePrecisionScoreWeighted" - #: The arithmetic mean of precision for each class, weighted by number of true instances in each - #: class. - PRECISION_SCORE_WEIGHTED = "PrecisionScoreWeighted" - -class ClusterPurpose(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Intended usage of the cluster - """ - - FAST_PROD = "FastProd" - DENSE_PROD = "DenseProd" - DEV_TEST = "DevTest" - -class ComputeInstanceAuthorizationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The Compute Instance Authorization type. Available values are personal (default). - """ - - PERSONAL = "personal" - -class ComputeInstanceState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Current state of an ComputeInstance. - """ - - CREATING = "Creating" - CREATE_FAILED = "CreateFailed" - DELETING = "Deleting" - RUNNING = "Running" - RESTARTING = "Restarting" - RESIZING = "Resizing" - JOB_RUNNING = "JobRunning" - SETTING_UP = "SettingUp" - SETUP_FAILED = "SetupFailed" - STARTING = "Starting" - STOPPED = "Stopped" - STOPPING = "Stopping" - USER_SETTING_UP = "UserSettingUp" - USER_SETUP_FAILED = "UserSetupFailed" - UNKNOWN = "Unknown" - UNUSABLE = "Unusable" - -class ComputePowerAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """[Required] The compute power action. 
- """ - - START = "Start" - STOP = "Stop" - -class ComputeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The type of compute - """ - - AKS = "AKS" - KUBERNETES = "Kubernetes" - AML_COMPUTE = "AmlCompute" - COMPUTE_INSTANCE = "ComputeInstance" - DATA_FACTORY = "DataFactory" - VIRTUAL_MACHINE = "VirtualMachine" - HD_INSIGHT = "HDInsight" - DATABRICKS = "Databricks" - DATA_LAKE_ANALYTICS = "DataLakeAnalytics" - SYNAPSE_SPARK = "SynapseSpark" - -class ConnectionAuthType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Authentication type of the connection target - """ - - PAT = "PAT" - MANAGED_IDENTITY = "ManagedIdentity" - USERNAME_PASSWORD = "UsernamePassword" - NONE = "None" - SAS = "SAS" - SERVICE_PRINCIPAL = "ServicePrincipal" - ACCESS_KEY = "AccessKey" - API_KEY = "ApiKey" - CUSTOM_KEYS = "CustomKeys" - -class ConnectionCategory(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Category of the connection - """ - - PYTHON_FEED = "PythonFeed" - CONTAINER_REGISTRY = "ContainerRegistry" - GIT = "Git" - S3 = "S3" - SNOWFLAKE = "Snowflake" - AZURE_SQL_DB = "AzureSqlDb" - AZURE_SYNAPSE_ANALYTICS = "AzureSynapseAnalytics" - AZURE_MY_SQL_DB = "AzureMySqlDb" - AZURE_POSTGRES_DB = "AzurePostgresDb" - ADLS_GEN2 = "ADLSGen2" - REDIS = "Redis" - API_KEY = "ApiKey" - AZURE_OPEN_AI = "AzureOpenAI" - COGNITIVE_SEARCH = "CognitiveSearch" - COGNITIVE_SERVICE = "CognitiveService" - CUSTOM_KEYS = "CustomKeys" - -class ContainerType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The type of container to retrieve logs from. - """ - - #: The container used to download models and score script. - STORAGE_INITIALIZER = "StorageInitializer" - #: The container used to serve user's request. - INFERENCE_SERVER = "InferenceServer" - #: The container used to collect payload and custom logging when mdc is enabled. - MODEL_DATA_COLLECTOR = "ModelDataCollector" - -class CreatedByType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The type of identity that created the resource. - """ - - USER = "User" - APPLICATION = "Application" - MANAGED_IDENTITY = "ManagedIdentity" - KEY = "Key" - -class CredentialsType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the datastore credentials type. - """ - - ACCOUNT_KEY = "AccountKey" - CERTIFICATE = "Certificate" - NONE = "None" - SAS = "Sas" - SERVICE_PRINCIPAL = "ServicePrincipal" - KERBEROS_KEYTAB = "KerberosKeytab" - KERBEROS_PASSWORD = "KerberosPassword" - -class DataAvailabilityStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - NONE = "None" - PENDING = "Pending" - INCOMPLETE = "Incomplete" - COMPLETE = "Complete" - -class DataCollectionMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - ENABLED = "Enabled" - DISABLED = "Disabled" - -class DataImportSourceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the type of data. - """ - - DATABASE = "database" - FILE_SYSTEM = "file_system" - -class DatastoreType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the datastore contents type. - """ - - AZURE_BLOB = "AzureBlob" - AZURE_DATA_LAKE_GEN1 = "AzureDataLakeGen1" - AZURE_DATA_LAKE_GEN2 = "AzureDataLakeGen2" - AZURE_FILE = "AzureFile" - HDFS = "Hdfs" - ONE_LAKE = "OneLake" - -class DataType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the type of data. 
- """ - - URI_FILE = "uri_file" - URI_FOLDER = "uri_folder" - MLTABLE = "mltable" - -class DeploymentProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Possible values for DeploymentProvisioningState. - """ - - CREATING = "Creating" - DELETING = "Deleting" - SCALING = "Scaling" - UPDATING = "Updating" - SUCCEEDED = "Succeeded" - FAILED = "Failed" - CANCELED = "Canceled" - -class DiagnoseResultLevel(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Level of workspace setup error - """ - - WARNING = "Warning" - ERROR = "Error" - INFORMATION = "Information" - -class DistributionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the job distribution type. - """ - - PY_TORCH = "PyTorch" - TENSOR_FLOW = "TensorFlow" - MPI = "Mpi" - RAY = "Ray" - -class EarlyTerminationPolicyType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - BANDIT = "Bandit" - MEDIAN_STOPPING = "MedianStopping" - TRUNCATION_SELECTION = "TruncationSelection" - -class EgressPublicNetworkAccessType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine whether PublicNetworkAccess is Enabled or Disabled for egress of a - deployment. - """ - - ENABLED = "Enabled" - DISABLED = "Disabled" - -class EmailNotificationEnableType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the email notification type. - """ - - JOB_COMPLETED = "JobCompleted" - JOB_FAILED = "JobFailed" - JOB_CANCELLED = "JobCancelled" - -class EncryptionStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Indicates whether or not the encryption is enabled for the workspace. - """ - - ENABLED = "Enabled" - DISABLED = "Disabled" - -class EndpointAuthMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine endpoint authentication mode. - """ - - AML_TOKEN = "AMLToken" - KEY = "Key" - AAD_TOKEN = "AADToken" - -class EndpointComputeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine endpoint compute type. - """ - - MANAGED = "Managed" - KUBERNETES = "Kubernetes" - AZURE_ML_COMPUTE = "AzureMLCompute" - -class EndpointProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """State of endpoint provisioning. - """ - - CREATING = "Creating" - DELETING = "Deleting" - SUCCEEDED = "Succeeded" - FAILED = "Failed" - UPDATING = "Updating" - CANCELED = "Canceled" - -class EndpointServiceConnectionStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Connection status of the service consumer with the service provider - """ - - APPROVED = "Approved" - PENDING = "Pending" - REJECTED = "Rejected" - DISCONNECTED = "Disconnected" - TIMEOUT = "Timeout" - -class EnvironmentType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Environment type is either user created or curated by Azure ML service - """ - - CURATED = "Curated" - USER_CREATED = "UserCreated" - -class EnvironmentVariableType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Type of the Environment Variable. Possible values are: local - For local variable - """ - - LOCAL = "local" - -class ExportFormatType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The format of exported labels. - """ - - DATASET = "Dataset" - COCO = "Coco" - CSV = "CSV" - -class FeatureAttributionMetric(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: The Normalized Discounted Cumulative Gain metric. 
- NORMALIZED_DISCOUNTED_CUMULATIVE_GAIN = "NormalizedDiscountedCumulativeGain" - -class FeatureDataType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - STRING = "String" - INTEGER = "Integer" - LONG = "Long" - FLOAT = "Float" - DOUBLE = "Double" - BINARY = "Binary" - DATETIME = "Datetime" - BOOLEAN = "Boolean" - -class FeatureLags(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Flag for generating lags for the numeric features. - """ - - #: No feature lags generated. - NONE = "None" - #: System auto-generates feature lags. - AUTO = "Auto" - -class FeaturizationMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Featurization mode - determines data featurization mode. - """ - - #: Auto mode, system performs featurization without any custom featurization inputs. - AUTO = "Auto" - #: Custom featurization. - CUSTOM = "Custom" - #: Featurization off. 'Forecasting' task cannot use this value. - OFF = "Off" - -class ForecastHorizonMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine forecast horizon selection mode. - """ - - #: Forecast horizon to be determined automatically. - AUTO = "Auto" - #: Use the custom forecast horizon. - CUSTOM = "Custom" - -class ForecastingModels(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum for all forecasting models supported by AutoML. - """ - - #: Auto-Autoregressive Integrated Moving Average (ARIMA) model uses time-series data and - #: statistical analysis to interpret the data and make future predictions. - #: This model aims to explain data by using time series data on its past values and uses linear - #: regression to make predictions. - AUTO_ARIMA = "AutoArima" - #: Prophet is a procedure for forecasting time series data based on an additive model where - #: non-linear trends are fit with yearly, weekly, and daily seasonality, plus holiday effects. - #: It works best with time series that have strong seasonal effects and several seasons of - #: historical data. Prophet is robust to missing data and shifts in the trend, and typically - #: handles outliers well. - PROPHET = "Prophet" - #: The Naive forecasting model makes predictions by carrying forward the latest target value for - #: each time-series in the training data. - NAIVE = "Naive" - #: The Seasonal Naive forecasting model makes predictions by carrying forward the latest season of - #: target values for each time-series in the training data. - SEASONAL_NAIVE = "SeasonalNaive" - #: The Average forecasting model makes predictions by carrying forward the average of the target - #: values for each time-series in the training data. - AVERAGE = "Average" - #: The Seasonal Average forecasting model makes predictions by carrying forward the average value - #: of the latest season of data for each time-series in the training data. - SEASONAL_AVERAGE = "SeasonalAverage" - #: Exponential smoothing is a time series forecasting method for univariate data that can be - #: extended to support data with a systematic trend or seasonal component. - EXPONENTIAL_SMOOTHING = "ExponentialSmoothing" - #: An Autoregressive Integrated Moving Average with Explanatory Variable (ARIMAX) model can be - #: viewed as a multiple regression model with one or more autoregressive (AR) terms and/or one or - #: more moving average (MA) terms. - #: This method is suitable for forecasting when data is stationary/non stationary, and - #: multivariate with any type of data pattern, i.e., level/trend /seasonality/cyclicity. 
- ARIMAX = "Arimax" - #: TCNForecaster: Temporal Convolutional Networks Forecaster. //TODO: Ask forecasting team for - #: brief intro. - TCN_FORECASTER = "TCNForecaster" - #: Elastic net is a popular type of regularized linear regression that combines two popular - #: penalties, specifically the L1 and L2 penalty functions. - ELASTIC_NET = "ElasticNet" - #: The technique of transiting week learners into a strong learner is called Boosting. The - #: gradient boosting algorithm process works on this theory of execution. - GRADIENT_BOOSTING = "GradientBoosting" - #: Decision Trees are a non-parametric supervised learning method used for both classification and - #: regression tasks. - #: The goal is to create a model that predicts the value of a target variable by learning simple - #: decision rules inferred from the data features. - DECISION_TREE = "DecisionTree" - #: K-nearest neighbors (KNN) algorithm uses 'feature similarity' to predict the values of new - #: datapoints - #: which further means that the new data point will be assigned a value based on how closely it - #: matches the points in the training set. - KNN = "KNN" - #: Lasso model fit with Least Angle Regression a.k.a. Lars. It is a Linear Model trained with an - #: L1 prior as regularizer. - LASSO_LARS = "LassoLars" - #: SGD: Stochastic gradient descent is an optimization algorithm often used in machine learning - #: applications - #: to find the model parameters that correspond to the best fit between predicted and actual - #: outputs. - #: It's an inexact but powerful technique. - SGD = "SGD" - #: Random forest is a supervised learning algorithm. - #: The "forest" it builds, is an ensemble of decision trees, usually trained with the bagging - #: method. - #: The general idea of the bagging method is that a combination of learning models increases the - #: overall result. - RANDOM_FOREST = "RandomForest" - #: Extreme Trees is an ensemble machine learning algorithm that combines the predictions from many - #: decision trees. It is related to the widely used random forest algorithm. - EXTREME_RANDOM_TREES = "ExtremeRandomTrees" - #: LightGBM is a gradient boosting framework that uses tree based learning algorithms. - LIGHT_GBM = "LightGBM" - #: XGBoostRegressor: Extreme Gradient Boosting Regressor is a supervised machine learning model - #: using ensemble of base learners. - XG_BOOST_REGRESSOR = "XGBoostRegressor" - -class ForecastingPrimaryMetrics(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Primary metrics for Forecasting task. - """ - - #: The Spearman's rank coefficient of correlation is a non-parametric measure of rank correlation. - SPEARMAN_CORRELATION = "SpearmanCorrelation" - #: The Normalized Root Mean Squared Error (NRMSE) the RMSE facilitates the comparison between - #: models with different scales. - NORMALIZED_ROOT_MEAN_SQUARED_ERROR = "NormalizedRootMeanSquaredError" - #: The R2 score is one of the performance evaluation measures for forecasting-based machine - #: learning models. - R2_SCORE = "R2Score" - #: The Normalized Mean Absolute Error (NMAE) is a validation metric to compare the Mean Absolute - #: Error (MAE) of (time) series with different scales. - NORMALIZED_MEAN_ABSOLUTE_ERROR = "NormalizedMeanAbsoluteError" - -class GenerationSafetyQualityMetric(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Generation safety quality metric enum. 
- """ - - ACCEPTABLE_GROUNDEDNESS_SCORE_PER_INSTANCE = "AcceptableGroundednessScorePerInstance" - AGGREGATED_GROUNDEDNESS_PASS_RATE = "AggregatedGroundednessPassRate" - ACCEPTABLE_COHERENCE_SCORE_PER_INSTANCE = "AcceptableCoherenceScorePerInstance" - AGGREGATED_COHERENCE_PASS_RATE = "AggregatedCoherencePassRate" - ACCEPTABLE_FLUENCY_SCORE_PER_INSTANCE = "AcceptableFluencyScorePerInstance" - AGGREGATED_FLUENCY_PASS_RATE = "AggregatedFluencyPassRate" - ACCEPTABLE_SIMILARITY_SCORE_PER_INSTANCE = "AcceptableSimilarityScorePerInstance" - AGGREGATED_SIMILARITY_PASS_RATE = "AggregatedSimilarityPassRate" - ACCEPTABLE_RELEVANCE_SCORE_PER_INSTANCE = "AcceptableRelevanceScorePerInstance" - AGGREGATED_RELEVANCE_PASS_RATE = "AggregatedRelevancePassRate" - -class GenerationTokenStatisticsMetric(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Generation token statistics metric enum. - """ - - TOTAL_TOKEN_COUNT = "TotalTokenCount" - TOTAL_TOKEN_COUNT_PER_GROUP = "TotalTokenCountPerGroup" - -class Goal(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Defines supported metric goals for hyperparameter tuning - """ - - MINIMIZE = "Minimize" - MAXIMIZE = "Maximize" - -class IdentityConfigurationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine identity framework. - """ - - MANAGED = "Managed" - AML_TOKEN = "AMLToken" - USER_IDENTITY = "UserIdentity" - -class ImageAnnotationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Annotation type of image data. - """ - - CLASSIFICATION = "Classification" - BOUNDING_BOX = "BoundingBox" - INSTANCE_SEGMENTATION = "InstanceSegmentation" - -class ImageType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Type of the image. Possible values are: docker - For docker images. azureml - For AzureML - images - """ - - DOCKER = "docker" - AZUREML = "azureml" - -class IncrementalDataRefresh(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Whether IncrementalDataRefresh is enabled - """ - - ENABLED = "Enabled" - DISABLED = "Disabled" - -class InferencingServerType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Inferencing server type for various targets. - """ - - AZURE_ML_ONLINE = "AzureMLOnline" - AZURE_ML_BATCH = "AzureMLBatch" - TRITON = "Triton" - CUSTOM = "Custom" - -class InputDeliveryMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the input data delivery mode. - """ - - READ_ONLY_MOUNT = "ReadOnlyMount" - READ_WRITE_MOUNT = "ReadWriteMount" - DOWNLOAD = "Download" - DIRECT = "Direct" - EVAL_MOUNT = "EvalMount" - EVAL_DOWNLOAD = "EvalDownload" - -class InputPathType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Input path type for package inputs. - """ - - URL = "Url" - PATH_ID = "PathId" - PATH_VERSION = "PathVersion" - -class InstanceSegmentationPrimaryMetrics(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Primary metrics for InstanceSegmentation tasks. - """ - - #: Mean Average Precision (MAP) is the average of AP (Average Precision). - #: AP is calculated for each class and averaged to get the MAP. - MEAN_AVERAGE_PRECISION = "MeanAveragePrecision" - -class IsolationMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Isolation mode for the managed network of a machine learning workspace. 
- """ - - DISABLED = "Disabled" - ALLOW_INTERNET_OUTBOUND = "AllowInternetOutbound" - ALLOW_ONLY_APPROVED_OUTBOUND = "AllowOnlyApprovedOutbound" - -class JobInputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the Job Input Type. - """ - - LITERAL = "literal" - URI_FILE = "uri_file" - URI_FOLDER = "uri_folder" - MLTABLE = "mltable" - CUSTOM_MODEL = "custom_model" - MLFLOW_MODEL = "mlflow_model" - TRITON_MODEL = "triton_model" - -class JobLimitsType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - COMMAND = "Command" - SWEEP = "Sweep" - -class JobOutputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the Job Output Type. - """ - - URI_FILE = "uri_file" - URI_FOLDER = "uri_folder" - MLTABLE = "mltable" - CUSTOM_MODEL = "custom_model" - MLFLOW_MODEL = "mlflow_model" - TRITON_MODEL = "triton_model" - -class JobProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the job provisioning state. - """ - - SUCCEEDED = "Succeeded" - FAILED = "Failed" - CANCELED = "Canceled" - IN_PROGRESS = "InProgress" - -class JobStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The status of a job. - """ - - #: Run hasn't started yet. - NOT_STARTED = "NotStarted" - #: Run has started. The user has a run ID. - STARTING = "Starting" - #: (Not used currently) It will be used if ES is creating the compute target. - PROVISIONING = "Provisioning" - #: The run environment is being prepared. - PREPARING = "Preparing" - #: The job is queued in the compute target. For example, in BatchAI the job is in queued state, - #: while waiting for all required nodes to be ready. - QUEUED = "Queued" - #: The job started to run in the compute target. - RUNNING = "Running" - #: Job is completed in the target. It is in output collection state now. - FINALIZING = "Finalizing" - #: Cancellation has been requested for the job. - CANCEL_REQUESTED = "CancelRequested" - #: Job completed successfully. This reflects that both the job itself and output collection states - #: completed successfully. - COMPLETED = "Completed" - #: Job failed. - FAILED = "Failed" - #: Following cancellation request, the job is now successfully canceled. - CANCELED = "Canceled" - #: When heartbeat is enabled, if the run isn't updating any information to RunHistory then the run - #: goes to NotResponding state. - #: NotResponding is the only state that is exempt from strict transition orders. A run can go from - #: NotResponding to any of the previous states. - NOT_RESPONDING = "NotResponding" - #: The job is paused by users. Some adjustment to labeling jobs can be made only in paused state. - PAUSED = "Paused" - #: Default job status if not mapped to all other statuses. - UNKNOWN = "Unknown" - #: The job is in a scheduled state. Job is not in any active state. - SCHEDULED = "Scheduled" - -class JobTier(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the job tier. - """ - - NULL = "Null" - SPOT = "Spot" - BASIC = "Basic" - STANDARD = "Standard" - PREMIUM = "Premium" - -class JobType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the type of job. 
- """ - - AUTO_ML = "AutoML" - COMMAND = "Command" - LABELING = "Labeling" - SWEEP = "Sweep" - PIPELINE = "Pipeline" - SPARK = "Spark" - -class KeyType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - PRIMARY = "Primary" - SECONDARY = "Secondary" - -class LearningRateScheduler(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Learning rate scheduler enum. - """ - - #: No learning rate scheduler selected. - NONE = "None" - #: Cosine Annealing With Warmup. - WARMUP_COSINE = "WarmupCosine" - #: Step learning rate scheduler. - STEP = "Step" - -class ListViewType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - ACTIVE_ONLY = "ActiveOnly" - ARCHIVED_ONLY = "ArchivedOnly" - ALL = "All" - -class LoadBalancerType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Load Balancer Type - """ - - PUBLIC_IP = "PublicIp" - INTERNAL_LOAD_BALANCER = "InternalLoadBalancer" - -class LogTrainingMetrics(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: Enable compute and log training metrics. - ENABLE = "Enable" - #: Disable compute and log training metrics. - DISABLE = "Disable" - -class LogValidationLoss(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: Enable compute and log validation metrics. - ENABLE = "Enable" - #: Disable compute and log validation metrics. - DISABLE = "Disable" - -class LogVerbosity(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum for setting log verbosity. - """ - - #: No logs emitted. - NOT_SET = "NotSet" - #: Debug and above log statements logged. - DEBUG = "Debug" - #: Info and above log statements logged. - INFO = "Info" - #: Warning and above log statements logged. - WARNING = "Warning" - #: Error and above log statements logged. - ERROR = "Error" - #: Only critical statements logged. - CRITICAL = "Critical" - -class ManagedNetworkStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Status for the managed network of a machine learning workspace. - """ - - INACTIVE = "Inactive" - ACTIVE = "Active" - -class ManagedServiceIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Type of managed service identity (where both SystemAssigned and UserAssigned types are - allowed). - """ - - NONE = "None" - SYSTEM_ASSIGNED = "SystemAssigned" - USER_ASSIGNED = "UserAssigned" - SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned,UserAssigned" - -class MaterializationStoreType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - NONE = "None" - ONLINE = "Online" - OFFLINE = "Offline" - ONLINE_AND_OFFLINE = "OnlineAndOffline" - -class MediaType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Media type of data asset. - """ - - IMAGE = "Image" - TEXT = "Text" - -class MLAssistConfigurationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - ENABLED = "Enabled" - DISABLED = "Disabled" - -class MlflowAutologger(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Indicates whether mlflow autologger is enabled for notebooks. - """ - - ENABLED = "Enabled" - DISABLED = "Disabled" - -class MLFlowAutologgerState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the state of mlflow autologger. - """ - - ENABLED = "Enabled" - DISABLED = "Disabled" - -class ModelSize(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Image model size. - """ - - #: No value selected. - NONE = "None" - #: Small size. - SMALL = "Small" - #: Medium size. - MEDIUM = "Medium" - #: Large size. - LARGE = "Large" - #: Extra large size. 
- EXTRA_LARGE = "ExtraLarge" - -class ModelTaskType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Model task type enum. - """ - - CLASSIFICATION = "Classification" - REGRESSION = "Regression" - QUESTION_ANSWERING = "QuestionAnswering" - -class MonitorComputeIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Monitor compute identity type enum. - """ - - #: Authenticates through user's AML token. - AML_TOKEN = "AmlToken" - #: Authenticates through a user-provided managed identity. - MANAGED_IDENTITY = "ManagedIdentity" - -class MonitorComputeType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Monitor compute type enum. - """ - - #: Serverless Spark compute. - SERVERLESS_SPARK = "ServerlessSpark" - -class MonitoringAlertNotificationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: Settings for Azure Monitor based alerting. - AZURE_MONITOR = "AzureMonitor" - #: Settings for AML email notifications. - EMAIL = "Email" - -class MonitoringFeatureDataType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: Used for features of numerical data type. - NUMERICAL = "Numerical" - #: Used for features of categorical data type. - CATEGORICAL = "Categorical" - -class MonitoringFeatureFilterType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: Includes all features. - ALL_FEATURES = "AllFeatures" - #: Only includes the top contributing features, measured by feature attribution. - TOP_N_BY_ATTRIBUTION = "TopNByAttribution" - #: Includes a user-defined subset of features. - FEATURE_SUBSET = "FeatureSubset" - -class MonitoringInputDataType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Monitoring input data type enum. - """ - - #: An input data with a fixed window size. - STATIC = "Static" - #: An input data which trailing relatively to the monitor's current run. - TRAILING = "Trailing" - #: An input data with tabular format which doesn't require preprocessing. - FIXED = "Fixed" - -class MonitoringModelType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: A model trained for classification tasks. - CLASSIFICATION = "Classification" - #: A model trained for regressions tasks. - REGRESSION = "Regression" - -class MonitoringNotificationMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: Disabled notifications will not produce emails/metrics leveraged for alerting. - DISABLED = "Disabled" - #: Enabled notification will produce emails/metrics leveraged for alerting. - ENABLED = "Enabled" - -class MonitoringSignalType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: Tracks model input data distribution change, comparing against training data or past production - #: data. - DATA_DRIFT = "DataDrift" - #: Tracks prediction result data distribution change, comparing against validation/test label data - #: or past production data. - PREDICTION_DRIFT = "PredictionDrift" - #: Tracks model input data integrity. - DATA_QUALITY = "DataQuality" - #: Tracks feature importance change in production, comparing against feature importance at - #: training time. - FEATURE_ATTRIBUTION_DRIFT = "FeatureAttributionDrift" - #: Tracks a custom signal provided by users. - CUSTOM = "Custom" - #: Tracks model performance based on ground truth data. - MODEL_PERFORMANCE = "ModelPerformance" - #: Tracks the safety and quality of generated content. - GENERATION_SAFETY_QUALITY = "GenerationSafetyQuality" - #: Tracks the token usage of generative endpoints. 
- GENERATION_TOKEN_STATISTICS = "GenerationTokenStatistics" - -class MountAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Mount Action. - """ - - MOUNT = "Mount" - UNMOUNT = "Unmount" - -class MountState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Mount state. - """ - - MOUNT_REQUESTED = "MountRequested" - MOUNTED = "Mounted" - MOUNT_FAILED = "MountFailed" - UNMOUNT_REQUESTED = "UnmountRequested" - UNMOUNT_FAILED = "UnmountFailed" - UNMOUNTED = "Unmounted" - -class MultiSelect(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Whether multiSelect is enabled - """ - - ENABLED = "Enabled" - DISABLED = "Disabled" - -class NCrossValidationsMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Determines how N-Cross validations value is determined. - """ - - #: Determine N-Cross validations value automatically. Supported only for 'Forecasting' AutoML - #: task. - AUTO = "Auto" - #: Use custom N-Cross validations value. - CUSTOM = "Custom" - -class Network(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """network of this container. - """ - - BRIDGE = "Bridge" - HOST = "Host" - -class NlpLearningRateScheduler(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum of learning rate schedulers that aligns with those supported by HF - """ - - #: No learning rate schedule. - NONE = "None" - #: Linear warmup and decay. - LINEAR = "Linear" - #: Linear warmup then cosine decay. - COSINE = "Cosine" - #: Linear warmup, cosine decay, then restart to initial LR. - COSINE_WITH_RESTARTS = "CosineWithRestarts" - #: Increase linearly then polynomially decay. - POLYNOMIAL = "Polynomial" - #: Constant learning rate. - CONSTANT = "Constant" - #: Linear warmup followed by constant value. - CONSTANT_WITH_WARMUP = "ConstantWithWarmup" - -class NodeState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """State of the compute node. Values are idle, running, preparing, unusable, leaving and - preempted. - """ - - IDLE = "idle" - RUNNING = "running" - PREPARING = "preparing" - UNUSABLE = "unusable" - LEAVING = "leaving" - PREEMPTED = "preempted" - -class NodesValueType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The enumerated types for the nodes value - """ - - ALL = "All" - CUSTOM = "Custom" - -class NumericalDataDriftMetric(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: The Jensen Shannon Distance (JSD) metric. - JENSEN_SHANNON_DISTANCE = "JensenShannonDistance" - #: The Population Stability Index (PSI) metric. - POPULATION_STABILITY_INDEX = "PopulationStabilityIndex" - #: The Normalized Wasserstein Distance metric. - NORMALIZED_WASSERSTEIN_DISTANCE = "NormalizedWassersteinDistance" - #: The Two Sample Kolmogorov-Smirnov Test (two-sample K–S) metric. - TWO_SAMPLE_KOLMOGOROV_SMIRNOV_TEST = "TwoSampleKolmogorovSmirnovTest" - -class NumericalDataQualityMetric(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: Calculates the rate of null values. - NULL_VALUE_RATE = "NullValueRate" - #: Calculates the rate of data type errors. - DATA_TYPE_ERROR_RATE = "DataTypeErrorRate" - #: Calculates the rate values are out of bounds. - OUT_OF_BOUNDS_RATE = "OutOfBoundsRate" - -class NumericalPredictionDriftMetric(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: The Jensen Shannon Distance (JSD) metric. - JENSEN_SHANNON_DISTANCE = "JensenShannonDistance" - #: The Population Stability Index (PSI) metric. - POPULATION_STABILITY_INDEX = "PopulationStabilityIndex" - #: The Normalized Wasserstein Distance metric. 
- NORMALIZED_WASSERSTEIN_DISTANCE = "NormalizedWassersteinDistance" - #: The Two Sample Kolmogorov-Smirnov Test (two-sample K–S) metric. - TWO_SAMPLE_KOLMOGOROV_SMIRNOV_TEST = "TwoSampleKolmogorovSmirnovTest" - -class ObjectDetectionPrimaryMetrics(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Primary metrics for Image ObjectDetection task. - """ - - #: Mean Average Precision (MAP) is the average of AP (Average Precision). - #: AP is calculated for each class and averaged to get the MAP. - MEAN_AVERAGE_PRECISION = "MeanAveragePrecision" - -class OneLakeArtifactType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine OneLake artifact type. - """ - - LAKE_HOUSE = "LakeHouse" - -class OperatingSystemType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The type of operating system. - """ - - LINUX = "Linux" - WINDOWS = "Windows" - -class OperationName(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Name of the last operation. - """ - - CREATE = "Create" - START = "Start" - STOP = "Stop" - RESTART = "Restart" - RESIZE = "Resize" - REIMAGE = "Reimage" - DELETE = "Delete" - -class OperationStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Operation status. - """ - - IN_PROGRESS = "InProgress" - SUCCEEDED = "Succeeded" - CREATE_FAILED = "CreateFailed" - START_FAILED = "StartFailed" - STOP_FAILED = "StopFailed" - RESTART_FAILED = "RestartFailed" - RESIZE_FAILED = "ResizeFailed" - REIMAGE_FAILED = "ReimageFailed" - DELETE_FAILED = "DeleteFailed" - -class OperationTrigger(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Trigger of operation. - """ - - USER = "User" - SCHEDULE = "Schedule" - IDLE_SHUTDOWN = "IdleShutdown" - -class OrderString(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - CREATED_AT_DESC = "CreatedAtDesc" - CREATED_AT_ASC = "CreatedAtAsc" - UPDATED_AT_DESC = "UpdatedAtDesc" - UPDATED_AT_ASC = "UpdatedAtAsc" - -class OsType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Compute OS Type - """ - - LINUX = "Linux" - WINDOWS = "Windows" - -class OutputDeliveryMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Output data delivery mode enums. - """ - - READ_WRITE_MOUNT = "ReadWriteMount" - UPLOAD = "Upload" - DIRECT = "Direct" - -class PackageBuildState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Package build state returned in package response. - """ - - NOT_STARTED = "NotStarted" - RUNNING = "Running" - SUCCEEDED = "Succeeded" - FAILED = "Failed" - -class PackageInputDeliveryMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Mounting type of the model or the inputs - """ - - COPY = "Copy" - DOWNLOAD = "Download" - -class PackageInputType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Type of the inputs. - """ - - URI_FILE = "UriFile" - URI_FOLDER = "UriFolder" - -class PatchStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The os patching status. - """ - - COMPLETED_WITH_WARNINGS = "CompletedWithWarnings" - FAILED = "Failed" - IN_PROGRESS = "InProgress" - SUCCEEDED = "Succeeded" - UNKNOWN = "Unknown" - -class PendingUploadCredentialType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the PendingUpload credentials type. 
- """ - - SAS = "SAS" - -class PendingUploadType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Type of storage to use for the pending upload location - """ - - NONE = "None" - TEMPORARY_BLOB_REFERENCE = "TemporaryBlobReference" - -class PrivateEndpointConnectionProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The current provisioning state. - """ - - SUCCEEDED = "Succeeded" - CREATING = "Creating" - DELETING = "Deleting" - FAILED = "Failed" - -class ProtectionLevel(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Protection level associated with the Intellectual Property. - """ - - #: All means Intellectual Property is fully protected. - ALL = "All" - #: None means it is not an Intellectual Property. - NONE = "None" - -class Protocol(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Protocol over which communication will happen over this endpoint - """ - - TCP = "tcp" - UDP = "udp" - HTTP = "http" - -class ProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The provision state of the cluster. Valid values are Unknown, Updating, Provisioning, - Succeeded, and Failed. - """ - - UNKNOWN = "Unknown" - UPDATING = "Updating" - CREATING = "Creating" - DELETING = "Deleting" - SUCCEEDED = "Succeeded" - FAILED = "Failed" - CANCELED = "Canceled" - -class ProvisioningStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The current deployment state of schedule. - """ - - COMPLETED = "Completed" - PROVISIONING = "Provisioning" - FAILED = "Failed" - -class PublicNetworkAccessType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine whether PublicNetworkAccess is Enabled or Disabled. - """ - - ENABLED = "Enabled" - DISABLED = "Disabled" - -class QuotaUnit(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """An enum describing the unit of quota measurement. - """ - - COUNT = "Count" - -class RandomSamplingAlgorithmRule(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The specific type of random algorithm - """ - - RANDOM = "Random" - SOBOL = "Sobol" - -class RecurrenceFrequency(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to describe the frequency of a recurrence schedule - """ - - #: Minute frequency. - MINUTE = "Minute" - #: Hour frequency. - HOUR = "Hour" - #: Day frequency. - DAY = "Day" - #: Week frequency. - WEEK = "Week" - #: Month frequency. - MONTH = "Month" - -class ReferenceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine which reference method to use for an asset. - """ - - ID = "Id" - DATA_PATH = "DataPath" - OUTPUT_PATH = "OutputPath" - -class RegressionModelPerformanceMetric(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: The Mean Absolute Error (MAE) metric. - MEAN_ABSOLUTE_ERROR = "MeanAbsoluteError" - #: The Root Mean Squared Error (RMSE) metric. - ROOT_MEAN_SQUARED_ERROR = "RootMeanSquaredError" - #: The Mean Squared Error (MSE) metric. - MEAN_SQUARED_ERROR = "MeanSquaredError" - -class RegressionModels(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum for all Regression models supported by AutoML. - """ - - #: Elastic net is a popular type of regularized linear regression that combines two popular - #: penalties, specifically the L1 and L2 penalty functions. - ELASTIC_NET = "ElasticNet" - #: The technique of transiting week learners into a strong learner is called Boosting. The - #: gradient boosting algorithm process works on this theory of execution. 
- GRADIENT_BOOSTING = "GradientBoosting" - #: Decision Trees are a non-parametric supervised learning method used for both classification and - #: regression tasks. - #: The goal is to create a model that predicts the value of a target variable by learning simple - #: decision rules inferred from the data features. - DECISION_TREE = "DecisionTree" - #: K-nearest neighbors (KNN) algorithm uses 'feature similarity' to predict the values of new - #: datapoints - #: which further means that the new data point will be assigned a value based on how closely it - #: matches the points in the training set. - KNN = "KNN" - #: Lasso model fit with Least Angle Regression a.k.a. Lars. It is a Linear Model trained with an - #: L1 prior as regularizer. - LASSO_LARS = "LassoLars" - #: SGD: Stochastic gradient descent is an optimization algorithm often used in machine learning - #: applications - #: to find the model parameters that correspond to the best fit between predicted and actual - #: outputs. - #: It's an inexact but powerful technique. - SGD = "SGD" - #: Random forest is a supervised learning algorithm. - #: The "forest" it builds, is an ensemble of decision trees, usually trained with the bagging - #: method. - #: The general idea of the bagging method is that a combination of learning models increases the - #: overall result. - RANDOM_FOREST = "RandomForest" - #: Extreme Trees is an ensemble machine learning algorithm that combines the predictions from many - #: decision trees. It is related to the widely used random forest algorithm. - EXTREME_RANDOM_TREES = "ExtremeRandomTrees" - #: LightGBM is a gradient boosting framework that uses tree based learning algorithms. - LIGHT_GBM = "LightGBM" - #: XGBoostRegressor: Extreme Gradient Boosting Regressor is a supervised machine learning model - #: using ensemble of base learners. - XG_BOOST_REGRESSOR = "XGBoostRegressor" - -class RegressionPrimaryMetrics(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Primary metrics for Regression task. - """ - - #: The Spearman's rank coefficient of correlation is a nonparametric measure of rank correlation. - SPEARMAN_CORRELATION = "SpearmanCorrelation" - #: The Normalized Root Mean Squared Error (NRMSE) the RMSE facilitates the comparison between - #: models with different scales. - NORMALIZED_ROOT_MEAN_SQUARED_ERROR = "NormalizedRootMeanSquaredError" - #: The R2 score is one of the performance evaluation measures for forecasting-based machine - #: learning models. - R2_SCORE = "R2Score" - #: The Normalized Mean Absolute Error (NMAE) is a validation metric to compare the Mean Absolute - #: Error (MAE) of (time) series with different scales. - NORMALIZED_MEAN_ABSOLUTE_ERROR = "NormalizedMeanAbsoluteError" - -class RemoteLoginPortPublicAccess(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh - port is closed on all nodes of the cluster. Enabled - Indicates that the public ssh port is - open on all nodes of the cluster. NotSpecified - Indicates that the public ssh port is closed - on all nodes of the cluster if VNet is defined, else is open all public nodes. It can be - default only during cluster creation time, after creation it will be either enabled or - disabled. 
- """ - - ENABLED = "Enabled" - DISABLED = "Disabled" - NOT_SPECIFIED = "NotSpecified" - -class RollingRateType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - YEAR = "Year" - MONTH = "Month" - DAY = "Day" - HOUR = "Hour" - MINUTE = "Minute" - -class RuleAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The action enum for networking rule. - """ - - ALLOW = "Allow" - DENY = "Deny" - -class RuleCategory(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Category of a managed network Outbound Rule of a machine learning workspace. - """ - - REQUIRED = "Required" - RECOMMENDED = "Recommended" - USER_DEFINED = "UserDefined" - -class RuleStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Type of a managed network Outbound Rule of a machine learning workspace. - """ - - INACTIVE = "Inactive" - ACTIVE = "Active" - -class RuleType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Type of a managed network Outbound Rule of a machine learning workspace. - """ - - FQDN = "FQDN" - PRIVATE_ENDPOINT = "PrivateEndpoint" - SERVICE_TAG = "ServiceTag" - -class SamplingAlgorithmType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - GRID = "Grid" - RANDOM = "Random" - BAYESIAN = "Bayesian" - -class ScaleType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - DEFAULT = "Default" - TARGET_UTILIZATION = "TargetUtilization" - -class ScheduleActionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - CREATE_JOB = "CreateJob" - INVOKE_BATCH_ENDPOINT = "InvokeBatchEndpoint" - IMPORT_DATA = "ImportData" - CREATE_MONITOR = "CreateMonitor" - -class ScheduleListViewType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - ENABLED_ONLY = "EnabledOnly" - DISABLED_ONLY = "DisabledOnly" - ALL = "All" - -class ScheduleProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The current deployment state of schedule. - """ - - COMPLETED = "Completed" - PROVISIONING = "Provisioning" - FAILED = "Failed" - -class ScheduleProvisioningStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - CREATING = "Creating" - UPDATING = "Updating" - DELETING = "Deleting" - SUCCEEDED = "Succeeded" - FAILED = "Failed" - CANCELED = "Canceled" - -class ScheduleStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Is the schedule enabled or disabled? - """ - - ENABLED = "Enabled" - DISABLED = "Disabled" - -class SeasonalityMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Forecasting seasonality mode. - """ - - #: Seasonality to be determined automatically. - AUTO = "Auto" - #: Use the custom seasonality value. - CUSTOM = "Custom" - -class SecretsType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the datastore secrets type. - """ - - ACCOUNT_KEY = "AccountKey" - CERTIFICATE = "Certificate" - SAS = "Sas" - SERVICE_PRINCIPAL = "ServicePrincipal" - KERBEROS_PASSWORD = "KerberosPassword" - KERBEROS_KEYTAB = "KerberosKeytab" - -class ServiceDataAccessAuthIdentity(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - #: Do not use any identity for service data access. - NONE = "None" - #: Use the system assigned managed identity of the Workspace to authenticate service data access. - WORKSPACE_SYSTEM_ASSIGNED_IDENTITY = "WorkspaceSystemAssignedIdentity" - #: Use the user assigned managed identity of the Workspace to authenticate service data access. 
- WORKSPACE_USER_ASSIGNED_IDENTITY = "WorkspaceUserAssignedIdentity" - -class ShortSeriesHandlingConfiguration(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The parameter defining how if AutoML should handle short time series. - """ - - #: Represents no/null value. - NONE = "None" - #: Short series will be padded if there are no long series, otherwise short series will be - #: dropped. - AUTO = "Auto" - #: All the short series will be padded. - PAD = "Pad" - #: All the short series will be dropped. - DROP = "Drop" - -class SkuScaleType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Node scaling setting for the compute sku. - """ - - #: Automatically scales node count. - AUTOMATIC = "Automatic" - #: Node count scaled upon user request. - MANUAL = "Manual" - #: Fixed set of nodes. - NONE = "None" - -class SkuTier(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """This field is required to be implemented by the Resource Provider if the service has more than - one tier, but is not required on a PUT. - """ - - FREE = "Free" - BASIC = "Basic" - STANDARD = "Standard" - PREMIUM = "Premium" - -class SourceType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Data source type. - """ - - DATASET = "Dataset" - DATASTORE = "Datastore" - URI = "URI" - -class SparkJobEntryType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - SPARK_JOB_PYTHON_ENTRY = "SparkJobPythonEntry" - SPARK_JOB_SCALA_ENTRY = "SparkJobScalaEntry" - -class SshPublicAccess(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh - port is closed on this instance. Enabled - Indicates that the public ssh port is open and - accessible according to the VNet/subnet policy if applicable. - """ - - ENABLED = "Enabled" - DISABLED = "Disabled" - -class SslConfigStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enable or disable ssl for scoring - """ - - DISABLED = "Disabled" - ENABLED = "Enabled" - AUTO = "Auto" - -class StackMetaLearnerType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The meta-learner is a model trained on the output of the individual heterogeneous models. - Default meta-learners are LogisticRegression for classification tasks (or LogisticRegressionCV - if cross-validation is enabled) and ElasticNet for regression/forecasting tasks (or - ElasticNetCV if cross-validation is enabled). - This parameter can be one of the following strings: LogisticRegression, LogisticRegressionCV, - LightGBMClassifier, ElasticNet, ElasticNetCV, LightGBMRegressor, or LinearRegression - """ - - NONE = "None" - #: Default meta-learners are LogisticRegression for classification tasks. - LOGISTIC_REGRESSION = "LogisticRegression" - #: Default meta-learners are LogisticRegression for classification task when CV is on. - LOGISTIC_REGRESSION_CV = "LogisticRegressionCV" - LIGHT_GBM_CLASSIFIER = "LightGBMClassifier" - #: Default meta-learners are LogisticRegression for regression task. - ELASTIC_NET = "ElasticNet" - #: Default meta-learners are LogisticRegression for regression task when CV is on. - ELASTIC_NET_CV = "ElasticNetCV" - LIGHT_GBM_REGRESSOR = "LightGBMRegressor" - LINEAR_REGRESSION = "LinearRegression" - -class Status(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Status of update workspace quota. 
- """ - - UNDEFINED = "Undefined" - SUCCESS = "Success" - FAILURE = "Failure" - INVALID_QUOTA_BELOW_CLUSTER_MINIMUM = "InvalidQuotaBelowClusterMinimum" - INVALID_QUOTA_EXCEEDS_SUBSCRIPTION_LIMIT = "InvalidQuotaExceedsSubscriptionLimit" - INVALID_VM_FAMILY_NAME = "InvalidVMFamilyName" - OPERATION_NOT_SUPPORTED_FOR_SKU = "OperationNotSupportedForSku" - OPERATION_NOT_ENABLED_FOR_REGION = "OperationNotEnabledForRegion" - -class StatusMessageLevel(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - ERROR = "Error" - INFORMATION = "Information" - WARNING = "Warning" - -class StochasticOptimizer(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Stochastic optimizer for image models. - """ - - #: No optimizer selected. - NONE = "None" - #: Stochastic Gradient Descent optimizer. - SGD = "Sgd" - #: Adam is algorithm the optimizes stochastic objective functions based on adaptive estimates of - #: moments. - ADAM = "Adam" - #: AdamW is a variant of the optimizer Adam that has an improved implementation of weight decay. - ADAMW = "Adamw" - -class StorageAccountType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """type of this storage account. - """ - - STANDARD_LRS = "Standard_LRS" - PREMIUM_LRS = "Premium_LRS" - -class TargetAggregationFunction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Target aggregate function. - """ - - #: Represent no value set. - NONE = "None" - SUM = "Sum" - MAX = "Max" - MIN = "Min" - MEAN = "Mean" - -class TargetLagsMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Target lags selection modes. - """ - - #: Target lags to be determined automatically. - AUTO = "Auto" - #: Use the custom target lags. - CUSTOM = "Custom" - -class TargetRollingWindowSizeMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Target rolling windows size mode. - """ - - #: Determine rolling windows size automatically. - AUTO = "Auto" - #: Use the specified rolling window size. - CUSTOM = "Custom" - -class TaskType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """AutoMLJob Task type. - """ - - #: Classification in machine learning and statistics is a supervised learning approach in which - #: the computer program learns from the data given to it and make new observations or - #: classifications. - CLASSIFICATION = "Classification" - #: Regression means to predict the value using the input data. Regression models are used to - #: predict a continuous value. - REGRESSION = "Regression" - #: Forecasting is a special kind of regression task that deals with time-series data and creates - #: forecasting model - #: that can be used to predict the near future values based on the inputs. - FORECASTING = "Forecasting" - #: Image Classification. Multi-class image classification is used when an image is classified with - #: only a single label - #: from a set of classes - e.g. each image is classified as either an image of a 'cat' or a 'dog' - #: or a 'duck'. - IMAGE_CLASSIFICATION = "ImageClassification" - #: Image Classification Multilabel. Multi-label image classification is used when an image could - #: have one or more labels - #: from a set of labels - e.g. an image could be labeled with both 'cat' and 'dog'. - IMAGE_CLASSIFICATION_MULTILABEL = "ImageClassificationMultilabel" - #: Image Object Detection. Object detection is used to identify objects in an image and locate - #: each object with a - #: bounding box e.g. locate all dogs and cats in an image and draw a bounding box around each. 
- IMAGE_OBJECT_DETECTION = "ImageObjectDetection" - #: Image Instance Segmentation. Instance segmentation is used to identify objects in an image at - #: the pixel level, - #: drawing a polygon around each object in the image. - IMAGE_INSTANCE_SEGMENTATION = "ImageInstanceSegmentation" - #: Text classification (also known as text tagging or text categorization) is the process of - #: sorting texts into categories. - #: Categories are mutually exclusive. - TEXT_CLASSIFICATION = "TextClassification" - #: Multilabel classification task assigns each sample to a group (zero or more) of target labels. - TEXT_CLASSIFICATION_MULTILABEL = "TextClassificationMultilabel" - #: Text Named Entity Recognition a.k.a. TextNER. - #: Named Entity Recognition (NER) is the ability to take free-form text and identify the - #: occurrences of entities such as people, locations, organizations, and more. - TEXT_NER = "TextNER" - -class TextAnnotationType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Annotation type of text data. - """ - - CLASSIFICATION = "Classification" - NAMED_ENTITY_RECOGNITION = "NamedEntityRecognition" - -class TrainingMode(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Training mode dictates whether to use distributed training or not - """ - - #: Auto mode. - AUTO = "Auto" - #: Distributed training mode. - DISTRIBUTED = "Distributed" - #: Non distributed training mode. - NON_DISTRIBUTED = "NonDistributed" - -class TriggerType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - RECURRENCE = "Recurrence" - CRON = "Cron" - -class UnderlyingResourceAction(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - - DELETE = "Delete" - DETACH = "Detach" - -class UnitOfMeasure(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The unit of time measurement for the specified VM price. Example: OneHour - """ - - ONE_HOUR = "OneHour" - -class UsageUnit(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """An enum describing the unit of usage measurement. - """ - - COUNT = "Count" - -class UseStl(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Configure STL Decomposition of the time-series target column. - """ - - #: No stl decomposition. - NONE = "None" - SEASON = "Season" - SEASON_TREND = "SeasonTrend" - -class ValidationMetricType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Metric computation method to use for validation metrics in image tasks. - """ - - #: No metric. - NONE = "None" - #: Coco metric. - COCO = "Coco" - #: Voc metric. - VOC = "Voc" - #: CocoVoc metric. - COCO_VOC = "CocoVoc" - -class VMPriceOSType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Operating system type used by the VM. - """ - - LINUX = "Linux" - WINDOWS = "Windows" - -class VmPriority(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Virtual Machine priority - """ - - DEDICATED = "Dedicated" - LOW_PRIORITY = "LowPriority" - -class VMTier(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """The type of the VM. - """ - - STANDARD = "Standard" - LOW_PRIORITY = "LowPriority" - SPOT = "Spot" - -class VolumeDefinitionType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe - """ - - BIND = "bind" - VOLUME = "volume" - TMPFS = "tmpfs" - NPIPE = "npipe" - -class WebhookType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum to determine the webhook callback service type. 
- """ - - AZURE_DEV_OPS = "AzureDevOps" - -class WeekDay(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): - """Enum of weekday - """ - - #: Monday weekday. - MONDAY = "Monday" - #: Tuesday weekday. - TUESDAY = "Tuesday" - #: Wednesday weekday. - WEDNESDAY = "Wednesday" - #: Thursday weekday. - THURSDAY = "Thursday" - #: Friday weekday. - FRIDAY = "Friday" - #: Saturday weekday. - SATURDAY = "Saturday" - #: Sunday weekday. - SUNDAY = "Sunday" diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_models.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_models.py deleted file mode 100644 index 7120ee4c486a..000000000000 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_models.py +++ /dev/null @@ -1,30965 +0,0 @@ -# coding=utf-8 -# -------------------------------------------------------------------------- -# Copyright (c) Microsoft Corporation. All rights reserved. -# Licensed under the MIT License. See License.txt in the project root for license information. -# Code generated by Microsoft (R) AutoRest Code Generator. -# Changes may cause incorrect behavior and will be lost if the code is regenerated. -# -------------------------------------------------------------------------- - -from azure.core.exceptions import HttpResponseError -import msrest.serialization - - -class WorkspaceConnectionPropertiesV2(msrest.serialization.Model): - """WorkspaceConnectionPropertiesV2. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AccessKeyAuthTypeWorkspaceConnectionProperties, ApiKeyAuthWorkspaceConnectionProperties, CustomKeysWorkspaceConnectionProperties, ManagedIdentityAuthTypeWorkspaceConnectionProperties, NoneAuthTypeWorkspaceConnectionProperties, PATAuthTypeWorkspaceConnectionProperties, SASAuthTypeWorkspaceConnectionProperties, ServicePrincipalAuthTypeWorkspaceConnectionProperties, UsernamePasswordAuthTypeWorkspaceConnectionProperties. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar expiry_time: - :vartype expiry_time: ~datetime.datetime - :ivar metadata: Any object. 
- :vartype metadata: any - :ivar target: - :vartype target: str - """ - - _validation = { - 'auth_type': {'required': True}, - } - - _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - } - - _subtype_map = { - 'auth_type': {'AccessKey': 'AccessKeyAuthTypeWorkspaceConnectionProperties', 'ApiKey': 'ApiKeyAuthWorkspaceConnectionProperties', 'CustomKeys': 'CustomKeysWorkspaceConnectionProperties', 'ManagedIdentity': 'ManagedIdentityAuthTypeWorkspaceConnectionProperties', 'None': 'NoneAuthTypeWorkspaceConnectionProperties', 'PAT': 'PATAuthTypeWorkspaceConnectionProperties', 'SAS': 'SASAuthTypeWorkspaceConnectionProperties', 'ServicePrincipal': 'ServicePrincipalAuthTypeWorkspaceConnectionProperties', 'UsernamePassword': 'UsernamePasswordAuthTypeWorkspaceConnectionProperties'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :keyword expiry_time: - :paramtype expiry_time: ~datetime.datetime - :keyword metadata: Any object. - :paramtype metadata: any - :keyword target: - :paramtype target: str - """ - super(WorkspaceConnectionPropertiesV2, self).__init__(**kwargs) - self.auth_type = None # type: Optional[str] - self.category = kwargs.get('category', None) - self.expiry_time = kwargs.get('expiry_time', None) - self.metadata = kwargs.get('metadata', None) - self.target = kwargs.get('target', None) - - -class AccessKeyAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): - """AccessKeyAuthTypeWorkspaceConnectionProperties. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar expiry_time: - :vartype expiry_time: ~datetime.datetime - :ivar metadata: Any object. 
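The _subtype_map on WorkspaceConnectionPropertiesV2 is what lets these msrest-based models pick a concrete subclass from the authType discriminator during deserialization. A hedged sketch of that round trip; the payload is invented, and Model.deserialize is the msrest classmethod these classes inherit.

# Import path follows the package layout in this patch.
from azure.ai.ml._restclient.v2023_08_01_preview.models import (
    AccessKeyAuthTypeWorkspaceConnectionProperties,
    WorkspaceConnectionPropertiesV2,
)

raw = {"authType": "AccessKey", "category": "S3", "target": "my-bucket"}  # invented payload
props = WorkspaceConnectionPropertiesV2.deserialize(raw)
assert isinstance(props, AccessKeyAuthTypeWorkspaceConnectionProperties)
assert props.auth_type == "AccessKey"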
- :vartype metadata: any - :ivar target: - :vartype target: str - :ivar credentials: - :vartype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionAccessKey - """ - - _validation = { - 'auth_type': {'required': True}, - } - - _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionAccessKey'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :keyword expiry_time: - :paramtype expiry_time: ~datetime.datetime - :keyword metadata: Any object. - :paramtype metadata: any - :keyword target: - :paramtype target: str - :keyword credentials: - :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionAccessKey - """ - super(AccessKeyAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'AccessKey' # type: str - self.credentials = kwargs.get('credentials', None) - - -class DatastoreCredentials(msrest.serialization.Model): - """Base definition for datastore credentials. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AccountKeyDatastoreCredentials, CertificateDatastoreCredentials, KerberosKeytabCredentials, KerberosPasswordCredentials, NoneDatastoreCredentials, SasDatastoreCredentials, ServicePrincipalDatastoreCredentials. - - All required parameters must be populated in order to send to Azure. - - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". - :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType - """ - - _validation = { - 'credentials_type': {'required': True}, - } - - _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - } - - _subtype_map = { - 'credentials_type': {'AccountKey': 'AccountKeyDatastoreCredentials', 'Certificate': 'CertificateDatastoreCredentials', 'KerberosKeytab': 'KerberosKeytabCredentials', 'KerberosPassword': 'KerberosPasswordCredentials', 'None': 'NoneDatastoreCredentials', 'Sas': 'SasDatastoreCredentials', 'ServicePrincipal': 'ServicePrincipalDatastoreCredentials'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(DatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = None # type: Optional[str] - - -class AccountKeyDatastoreCredentials(DatastoreCredentials): - """Account key datastore credentials configuration. - - All required parameters must be populated in order to send to Azure. - - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". 
- :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType - :ivar secrets: Required. [Required] Storage account secrets. - :vartype secrets: ~azure.mgmt.machinelearningservices.models.AccountKeyDatastoreSecrets - """ - - _validation = { - 'credentials_type': {'required': True}, - 'secrets': {'required': True}, - } - - _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'AccountKeyDatastoreSecrets'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword secrets: Required. [Required] Storage account secrets. - :paramtype secrets: ~azure.mgmt.machinelearningservices.models.AccountKeyDatastoreSecrets - """ - super(AccountKeyDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'AccountKey' # type: str - self.secrets = kwargs['secrets'] - - -class DatastoreSecrets(msrest.serialization.Model): - """Base definition for datastore secrets. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AccountKeyDatastoreSecrets, CertificateDatastoreSecrets, KerberosKeytabSecrets, KerberosPasswordSecrets, SasDatastoreSecrets, ServicePrincipalDatastoreSecrets. - - All required parameters must be populated in order to send to Azure. - - :ivar secrets_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", "Sas", - "ServicePrincipal", "KerberosPassword", "KerberosKeytab". - :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType - """ - - _validation = { - 'secrets_type': {'required': True}, - } - - _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - } - - _subtype_map = { - 'secrets_type': {'AccountKey': 'AccountKeyDatastoreSecrets', 'Certificate': 'CertificateDatastoreSecrets', 'KerberosKeytab': 'KerberosKeytabSecrets', 'KerberosPassword': 'KerberosPasswordSecrets', 'Sas': 'SasDatastoreSecrets', 'ServicePrincipal': 'ServicePrincipalDatastoreSecrets'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(DatastoreSecrets, self).__init__(**kwargs) - self.secrets_type = None # type: Optional[str] - - -class AccountKeyDatastoreSecrets(DatastoreSecrets): - """Datastore account key secrets. - - All required parameters must be populated in order to send to Azure. - - :ivar secrets_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", "Sas", - "ServicePrincipal", "KerberosPassword", "KerberosKeytab". - :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType - :ivar key: Storage account key. - :vartype key: str - """ - - _validation = { - 'secrets_type': {'required': True}, - } - - _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword key: Storage account key. - :paramtype key: str - """ - super(AccountKeyDatastoreSecrets, self).__init__(**kwargs) - self.secrets_type = 'AccountKey' # type: str - self.key = kwargs.get('key', None) - - -class AcrDetails(msrest.serialization.Model): - """Details of ACR account to be used for the Registry. - - :ivar system_created_acr_account: Details of system created ACR account to be used for the - Registry. 
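In these models the [Required] fields are read with kwargs['secrets'], so omitting them fails at construction time rather than at request time. A hedged construction sketch; the key value is a placeholder.

# Import path follows the package layout in this patch.
from azure.ai.ml._restclient.v2023_08_01_preview.models import (
    AccountKeyDatastoreCredentials,
    AccountKeyDatastoreSecrets,
)

secrets = AccountKeyDatastoreSecrets(key="<storage-account-key>")  # placeholder value
creds = AccountKeyDatastoreCredentials(secrets=secrets)
assert creds.credentials_type == "AccountKey"  # discriminator fixed by the subclass
# AccountKeyDatastoreCredentials() with no secrets would raise KeyError('secrets').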
- :vartype system_created_acr_account: - ~azure.mgmt.machinelearningservices.models.SystemCreatedAcrAccount - :ivar user_created_acr_account: Details of user created ACR account to be used for the - Registry. - :vartype user_created_acr_account: - ~azure.mgmt.machinelearningservices.models.UserCreatedAcrAccount - """ - - _attribute_map = { - 'system_created_acr_account': {'key': 'systemCreatedAcrAccount', 'type': 'SystemCreatedAcrAccount'}, - 'user_created_acr_account': {'key': 'userCreatedAcrAccount', 'type': 'UserCreatedAcrAccount'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword system_created_acr_account: Details of system created ACR account to be used for the - Registry. - :paramtype system_created_acr_account: - ~azure.mgmt.machinelearningservices.models.SystemCreatedAcrAccount - :keyword user_created_acr_account: Details of user created ACR account to be used for the - Registry. - :paramtype user_created_acr_account: - ~azure.mgmt.machinelearningservices.models.UserCreatedAcrAccount - """ - super(AcrDetails, self).__init__(**kwargs) - self.system_created_acr_account = kwargs.get('system_created_acr_account', None) - self.user_created_acr_account = kwargs.get('user_created_acr_account', None) - - -class AKSSchema(msrest.serialization.Model): - """AKSSchema. - - :ivar properties: AKS properties. - :vartype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties - """ - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'AKSSchemaProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: AKS properties. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties - """ - super(AKSSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class Compute(msrest.serialization.Model): - """Machine Learning compute object. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AKS, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HDInsight, Kubernetes, SynapseSpark, VirtualMachine. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar compute_location: Location for the underlying compute. - :vartype compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar description: The description of the Machine Learning compute. - :vartype description: str - :ivar created_on: The time at which the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The time at which the compute was last modified. - :vartype modified_on: ~datetime.datetime - :ivar resource_id: ARM resource id of the underlying compute. - :vartype resource_id: str - :ivar provisioning_errors: Errors during provisioning. 
- :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI - and AAD exclusively for authentication. - :vartype disable_local_auth: bool - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, - } - - _subtype_map = { - 'compute_type': {'AKS': 'AKS', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HDInsight', 'Kubernetes': 'Kubernetes', 'SynapseSpark': 'SynapseSpark', 'VirtualMachine': 'VirtualMachine'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword compute_location: Location for the underlying compute. - :paramtype compute_location: str - :keyword description: The description of the Machine Learning compute. - :paramtype description: str - :keyword resource_id: ARM resource id of the underlying compute. - :paramtype resource_id: str - :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only - MSI and AAD exclusively for authentication. - :paramtype disable_local_auth: bool - """ - super(Compute, self).__init__(**kwargs) - self.compute_type = None # type: Optional[str] - self.compute_location = kwargs.get('compute_location', None) - self.provisioning_state = None - self.description = kwargs.get('description', None) - self.created_on = None - self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) - self.provisioning_errors = None - self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) - - -class AKS(Compute, AKSSchema): - """A Machine Learning compute based on AKS. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar properties: AKS properties. - :vartype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". 
- :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar compute_location: Location for the underlying compute. - :vartype compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar description: The description of the Machine Learning compute. - :vartype description: str - :ivar created_on: The time at which the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The time at which the compute was last modified. - :vartype modified_on: ~datetime.datetime - :ivar resource_id: ARM resource id of the underlying compute. - :vartype resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI - and AAD exclusively for authentication. - :vartype disable_local_auth: bool - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'AKSSchemaProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: AKS properties. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties - :keyword compute_location: Location for the underlying compute. - :paramtype compute_location: str - :keyword description: The description of the Machine Learning compute. - :paramtype description: str - :keyword resource_id: ARM resource id of the underlying compute. - :paramtype resource_id: str - :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only - MSI and AAD exclusively for authentication. 
- :paramtype disable_local_auth: bool - """ - super(AKS, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'AKS' # type: str - self.compute_location = kwargs.get('compute_location', None) - self.provisioning_state = None - self.description = kwargs.get('description', None) - self.created_on = None - self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) - self.provisioning_errors = None - self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) - - -class AksComputeSecretsProperties(msrest.serialization.Model): - """Properties of AksComputeSecrets. - - :ivar user_kube_config: Content of kubeconfig file that can be used to connect to the - Kubernetes cluster. - :vartype user_kube_config: str - :ivar admin_kube_config: Content of kubeconfig file that can be used to connect to the - Kubernetes cluster. - :vartype admin_kube_config: str - :ivar image_pull_secret_name: Image registry pull secret. - :vartype image_pull_secret_name: str - """ - - _attribute_map = { - 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'}, - 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'}, - 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword user_kube_config: Content of kubeconfig file that can be used to connect to the - Kubernetes cluster. - :paramtype user_kube_config: str - :keyword admin_kube_config: Content of kubeconfig file that can be used to connect to the - Kubernetes cluster. - :paramtype admin_kube_config: str - :keyword image_pull_secret_name: Image registry pull secret. - :paramtype image_pull_secret_name: str - """ - super(AksComputeSecretsProperties, self).__init__(**kwargs) - self.user_kube_config = kwargs.get('user_kube_config', None) - self.admin_kube_config = kwargs.get('admin_kube_config', None) - self.image_pull_secret_name = kwargs.get('image_pull_secret_name', None) - - -class ComputeSecrets(msrest.serialization.Model): - """Secrets related to a Machine Learning compute. Might differ for every type of compute. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets. - - All required parameters must be populated in order to send to Azure. - - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - """ - - _validation = { - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - } - - _subtype_map = { - 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ComputeSecrets, self).__init__(**kwargs) - self.compute_type = None # type: Optional[str] - - -class AksComputeSecrets(ComputeSecrets, AksComputeSecretsProperties): - """Secrets related to a Machine Learning compute based on AKS. - - All required parameters must be populated in order to send to Azure. - - :ivar user_kube_config: Content of kubeconfig file that can be used to connect to the - Kubernetes cluster. 
- :vartype user_kube_config: str - :ivar admin_kube_config: Content of kubeconfig file that can be used to connect to the - Kubernetes cluster. - :vartype admin_kube_config: str - :ivar image_pull_secret_name: Image registry pull secret. - :vartype image_pull_secret_name: str - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - """ - - _validation = { - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'}, - 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'}, - 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword user_kube_config: Content of kubeconfig file that can be used to connect to the - Kubernetes cluster. - :paramtype user_kube_config: str - :keyword admin_kube_config: Content of kubeconfig file that can be used to connect to the - Kubernetes cluster. - :paramtype admin_kube_config: str - :keyword image_pull_secret_name: Image registry pull secret. - :paramtype image_pull_secret_name: str - """ - super(AksComputeSecrets, self).__init__(**kwargs) - self.user_kube_config = kwargs.get('user_kube_config', None) - self.admin_kube_config = kwargs.get('admin_kube_config', None) - self.image_pull_secret_name = kwargs.get('image_pull_secret_name', None) - self.compute_type = 'AKS' # type: str - - -class AksNetworkingConfiguration(msrest.serialization.Model): - """Advance configuration for AKS networking. - - :ivar subnet_id: Virtual network subnet resource ID the compute nodes belong to. - :vartype subnet_id: str - :ivar service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must - not overlap with any Subnet IP ranges. - :vartype service_cidr: str - :ivar dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within - the Kubernetes service address range specified in serviceCidr. - :vartype dns_service_ip: str - :ivar docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It - must not overlap with any Subnet IP ranges or the Kubernetes service address range. - :vartype docker_bridge_cidr: str - """ - - _validation = { - 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'}, - 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'}, - 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'}, - } - - _attribute_map = { - 'subnet_id': {'key': 'subnetId', 'type': 'str'}, - 'service_cidr': {'key': 'serviceCidr', 'type': 'str'}, - 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'}, - 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword subnet_id: Virtual network subnet resource ID the compute nodes belong to. - :paramtype subnet_id: str - :keyword service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It - must not overlap with any Subnet IP ranges. 
- :paramtype service_cidr: str - :keyword dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be - within the Kubernetes service address range specified in serviceCidr. - :paramtype dns_service_ip: str - :keyword docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It - must not overlap with any Subnet IP ranges or the Kubernetes service address range. - :paramtype docker_bridge_cidr: str - """ - super(AksNetworkingConfiguration, self).__init__(**kwargs) - self.subnet_id = kwargs.get('subnet_id', None) - self.service_cidr = kwargs.get('service_cidr', None) - self.dns_service_ip = kwargs.get('dns_service_ip', None) - self.docker_bridge_cidr = kwargs.get('docker_bridge_cidr', None) - - -class AKSSchemaProperties(msrest.serialization.Model): - """AKS properties. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar cluster_fqdn: Cluster full qualified domain name. - :vartype cluster_fqdn: str - :ivar system_services: System services. - :vartype system_services: list[~azure.mgmt.machinelearningservices.models.SystemService] - :ivar agent_count: Number of agents. - :vartype agent_count: int - :ivar agent_vm_size: Agent virtual machine size. - :vartype agent_vm_size: str - :ivar cluster_purpose: Intended usage of the cluster. Possible values include: "FastProd", - "DenseProd", "DevTest". Default value: "FastProd". - :vartype cluster_purpose: str or ~azure.mgmt.machinelearningservices.models.ClusterPurpose - :ivar ssl_configuration: SSL configuration. - :vartype ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration - :ivar aks_networking_configuration: AKS networking configuration for vnet. - :vartype aks_networking_configuration: - ~azure.mgmt.machinelearningservices.models.AksNetworkingConfiguration - :ivar load_balancer_type: Load Balancer Type. Possible values include: "PublicIp", - "InternalLoadBalancer". Default value: "PublicIp". - :vartype load_balancer_type: str or ~azure.mgmt.machinelearningservices.models.LoadBalancerType - :ivar load_balancer_subnet: Load Balancer Subnet. - :vartype load_balancer_subnet: str - """ - - _validation = { - 'system_services': {'readonly': True}, - 'agent_count': {'minimum': 0}, - } - - _attribute_map = { - 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'}, - 'system_services': {'key': 'systemServices', 'type': '[SystemService]'}, - 'agent_count': {'key': 'agentCount', 'type': 'int'}, - 'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'}, - 'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'}, - 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'}, - 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'}, - 'load_balancer_type': {'key': 'loadBalancerType', 'type': 'str'}, - 'load_balancer_subnet': {'key': 'loadBalancerSubnet', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword cluster_fqdn: Cluster full qualified domain name. - :paramtype cluster_fqdn: str - :keyword agent_count: Number of agents. - :paramtype agent_count: int - :keyword agent_vm_size: Agent virtual machine size. - :paramtype agent_vm_size: str - :keyword cluster_purpose: Intended usage of the cluster. Possible values include: "FastProd", - "DenseProd", "DevTest". Default value: "FastProd". - :paramtype cluster_purpose: str or ~azure.mgmt.machinelearningservices.models.ClusterPurpose - :keyword ssl_configuration: SSL configuration. 
- :paramtype ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration - :keyword aks_networking_configuration: AKS networking configuration for vnet. - :paramtype aks_networking_configuration: - ~azure.mgmt.machinelearningservices.models.AksNetworkingConfiguration - :keyword load_balancer_type: Load Balancer Type. Possible values include: "PublicIp", - "InternalLoadBalancer". Default value: "PublicIp". - :paramtype load_balancer_type: str or - ~azure.mgmt.machinelearningservices.models.LoadBalancerType - :keyword load_balancer_subnet: Load Balancer Subnet. - :paramtype load_balancer_subnet: str - """ - super(AKSSchemaProperties, self).__init__(**kwargs) - self.cluster_fqdn = kwargs.get('cluster_fqdn', None) - self.system_services = None - self.agent_count = kwargs.get('agent_count', None) - self.agent_vm_size = kwargs.get('agent_vm_size', None) - self.cluster_purpose = kwargs.get('cluster_purpose', "FastProd") - self.ssl_configuration = kwargs.get('ssl_configuration', None) - self.aks_networking_configuration = kwargs.get('aks_networking_configuration', None) - self.load_balancer_type = kwargs.get('load_balancer_type', "PublicIp") - self.load_balancer_subnet = kwargs.get('load_balancer_subnet', None) - - -class MonitoringFeatureFilterBase(msrest.serialization.Model): - """MonitoringFeatureFilterBase. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AllFeatures, FeatureSubset, TopNFeaturesByAttribution. - - All required parameters must be populated in order to send to Azure. - - :ivar filter_type: Required. [Required] Specifies the feature filter to leverage when selecting - features to calculate metrics over.Constant filled by server. Possible values include: - "AllFeatures", "TopNByAttribution", "FeatureSubset". - :vartype filter_type: str or - ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterType - """ - - _validation = { - 'filter_type': {'required': True}, - } - - _attribute_map = { - 'filter_type': {'key': 'filterType', 'type': 'str'}, - } - - _subtype_map = { - 'filter_type': {'AllFeatures': 'AllFeatures', 'FeatureSubset': 'FeatureSubset', 'TopNByAttribution': 'TopNFeaturesByAttribution'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(MonitoringFeatureFilterBase, self).__init__(**kwargs) - self.filter_type = None # type: Optional[str] - - -class AllFeatures(MonitoringFeatureFilterBase): - """AllFeatures. - - All required parameters must be populated in order to send to Azure. - - :ivar filter_type: Required. [Required] Specifies the feature filter to leverage when selecting - features to calculate metrics over.Constant filled by server. Possible values include: - "AllFeatures", "TopNByAttribution", "FeatureSubset". - :vartype filter_type: str or - ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterType - """ - - _validation = { - 'filter_type': {'required': True}, - } - - _attribute_map = { - 'filter_type': {'key': 'filterType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(AllFeatures, self).__init__(**kwargs) - self.filter_type = 'AllFeatures' # type: str - - -class Nodes(msrest.serialization.Model): - """Abstract Nodes definition. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AllNodes. - - All required parameters must be populated in order to send to Azure. - - :ivar nodes_value_type: Required. [Required] Type of the Nodes value.Constant filled by server. 
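A rough sketch of how the AKS compute models above compose, using only keyword arguments documented in this hunk. The import path is assumed from the docstring references; the subnet ID, VM size, and CIDR values are placeholders.

    from azure.mgmt.machinelearningservices.models import (
        AKS,
        AKSSchemaProperties,
        AksNetworkingConfiguration,
    )

    networking = AksNetworkingConfiguration(
        subnet_id="<subnet-resource-id>",      # placeholder ARM resource ID
        service_cidr="10.0.0.0/16",
        dns_service_ip="10.0.0.10",
        docker_bridge_cidr="172.17.0.1/16",
    )

    aks_properties = AKSSchemaProperties(
        agent_count=3,
        agent_vm_size="Standard_D3_v2",        # example size only
        cluster_purpose="DevTest",
        aks_networking_configuration=networking,
        load_balancer_type="InternalLoadBalancer",
    )

    # The computeType discriminator is fixed to "AKS" by the constructor; read-only
    # fields such as provisioning_state and created_on are populated by the server.
    aks_compute = AKS(properties=aks_properties, description="Attached AKS cluster")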
- Possible values include: "All", "Custom". - :vartype nodes_value_type: str or ~azure.mgmt.machinelearningservices.models.NodesValueType - """ - - _validation = { - 'nodes_value_type': {'required': True}, - } - - _attribute_map = { - 'nodes_value_type': {'key': 'nodesValueType', 'type': 'str'}, - } - - _subtype_map = { - 'nodes_value_type': {'All': 'AllNodes'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(Nodes, self).__init__(**kwargs) - self.nodes_value_type = None # type: Optional[str] - - -class AllNodes(Nodes): - """All nodes means the service will be running on all of the nodes of the job. - - All required parameters must be populated in order to send to Azure. - - :ivar nodes_value_type: Required. [Required] Type of the Nodes value.Constant filled by server. - Possible values include: "All", "Custom". - :vartype nodes_value_type: str or ~azure.mgmt.machinelearningservices.models.NodesValueType - """ - - _validation = { - 'nodes_value_type': {'required': True}, - } - - _attribute_map = { - 'nodes_value_type': {'key': 'nodesValueType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(AllNodes, self).__init__(**kwargs) - self.nodes_value_type = 'All' # type: str - - -class AmlComputeSchema(msrest.serialization.Model): - """Properties(top level) of AmlCompute. - - :ivar properties: Properties of AmlCompute. - :vartype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties - """ - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Properties of AmlCompute. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties - """ - super(AmlComputeSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class AmlCompute(Compute, AmlComputeSchema): - """An Azure Machine Learning compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar properties: Properties of AmlCompute. - :vartype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar compute_location: Location for the underlying compute. - :vartype compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar description: The description of the Machine Learning compute. - :vartype description: str - :ivar created_on: The time at which the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The time at which the compute was last modified. - :vartype modified_on: ~datetime.datetime - :ivar resource_id: ARM resource id of the underlying compute. - :vartype resource_id: str - :ivar provisioning_errors: Errors during provisioning. 
- :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI - and AAD exclusively for authentication. - :vartype disable_local_auth: bool - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Properties of AmlCompute. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties - :keyword compute_location: Location for the underlying compute. - :paramtype compute_location: str - :keyword description: The description of the Machine Learning compute. - :paramtype description: str - :keyword resource_id: ARM resource id of the underlying compute. - :paramtype resource_id: str - :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only - MSI and AAD exclusively for authentication. - :paramtype disable_local_auth: bool - """ - super(AmlCompute, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'AmlCompute' # type: str - self.compute_location = kwargs.get('compute_location', None) - self.provisioning_state = None - self.description = kwargs.get('description', None) - self.created_on = None - self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) - self.provisioning_errors = None - self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) - - -class AmlComputeNodeInformation(msrest.serialization.Model): - """Compute node information related to a AmlCompute. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar node_id: ID of the compute node. - :vartype node_id: str - :ivar private_ip_address: Private IP address of the compute node. - :vartype private_ip_address: str - :ivar public_ip_address: Public IP address of the compute node. - :vartype public_ip_address: str - :ivar port: SSH port number of the node. - :vartype port: int - :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable, - leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable", - "leaving", "preempted". 
- :vartype node_state: str or ~azure.mgmt.machinelearningservices.models.NodeState - :ivar run_id: ID of the Experiment running on the node, if any else null. - :vartype run_id: str - """ - - _validation = { - 'node_id': {'readonly': True}, - 'private_ip_address': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'port': {'readonly': True}, - 'node_state': {'readonly': True}, - 'run_id': {'readonly': True}, - } - - _attribute_map = { - 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'port': {'key': 'port', 'type': 'int'}, - 'node_state': {'key': 'nodeState', 'type': 'str'}, - 'run_id': {'key': 'runId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(AmlComputeNodeInformation, self).__init__(**kwargs) - self.node_id = None - self.private_ip_address = None - self.public_ip_address = None - self.port = None - self.node_state = None - self.run_id = None - - -class AmlComputeNodesInformation(msrest.serialization.Model): - """Result of AmlCompute Nodes. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar nodes: The collection of returned AmlCompute nodes details. - :vartype nodes: list[~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformation] - :ivar next_link: The continuation token. - :vartype next_link: str - """ - - _validation = { - 'nodes': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(AmlComputeNodesInformation, self).__init__(**kwargs) - self.nodes = None - self.next_link = None - - -class AmlComputeProperties(msrest.serialization.Model): - """AML Compute properties. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value: - "Linux". - :vartype os_type: str or ~azure.mgmt.machinelearningservices.models.OsType - :ivar vm_size: Virtual Machine Size. - :vartype vm_size: str - :ivar vm_priority: Virtual Machine priority. Possible values include: "Dedicated", - "LowPriority". - :vartype vm_priority: str or ~azure.mgmt.machinelearningservices.models.VmPriority - :ivar virtual_machine_image: Virtual Machine image for AML Compute - windows only. - :vartype virtual_machine_image: ~azure.mgmt.machinelearningservices.models.VirtualMachineImage - :ivar isolated_network: Network is isolated or not. - :vartype isolated_network: bool - :ivar scale_settings: Scale settings for AML Compute. - :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings - :ivar user_account_credentials: Credentials for an administrator user account that will be - created on each compute node. - :vartype user_account_credentials: - ~azure.mgmt.machinelearningservices.models.UserAccountCredentials - :ivar subnet: Virtual network subnet resource ID the compute nodes belong to. - :vartype subnet: ~azure.mgmt.machinelearningservices.models.ResourceId - :ivar remote_login_port_public_access: State of the public SSH port. Possible values are: - Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled - - Indicates that the public ssh port is open on all nodes of the cluster. 
NotSpecified - - Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined, - else is open all public nodes. It can be default only during cluster creation time, after - creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled", - "NotSpecified". Default value: "NotSpecified". - :vartype remote_login_port_public_access: str or - ~azure.mgmt.machinelearningservices.models.RemoteLoginPortPublicAccess - :ivar allocation_state: Allocation state of the compute. Possible values are: steady - - Indicates that the compute is not resizing. There are no changes to the number of compute nodes - in the compute in progress. A compute enters this state when it is created and when no - operations are being performed on the compute to change the number of compute nodes. resizing - - Indicates that the compute is resizing; that is, compute nodes are being added to or removed - from the compute. Possible values include: "Steady", "Resizing". - :vartype allocation_state: str or ~azure.mgmt.machinelearningservices.models.AllocationState - :ivar allocation_state_transition_time: The time at which the compute entered its current - allocation state. - :vartype allocation_state_transition_time: ~datetime.datetime - :ivar errors: Collection of errors encountered by various compute nodes during node setup. - :vartype errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar current_node_count: The number of compute nodes currently assigned to the compute. - :vartype current_node_count: int - :ivar target_node_count: The target number of compute nodes for the compute. If the - allocationState is resizing, this property denotes the target node count for the ongoing resize - operation. If the allocationState is steady, this property denotes the target node count for - the previous resize operation. - :vartype target_node_count: int - :ivar node_state_counts: Counts of various node states on the compute. - :vartype node_state_counts: ~azure.mgmt.machinelearningservices.models.NodeStateCounts - :ivar enable_node_public_ip: Enable or disable node public IP address provisioning. Possible - values are: Possible values are: true - Indicates that the compute nodes will have public IPs - provisioned. false - Indicates that the compute nodes will have a private endpoint and no - public IPs. - :vartype enable_node_public_ip: bool - :ivar property_bag: A property bag containing additional properties. 
- :vartype property_bag: any - """ - - _validation = { - 'allocation_state': {'readonly': True}, - 'allocation_state_transition_time': {'readonly': True}, - 'errors': {'readonly': True}, - 'current_node_count': {'readonly': True}, - 'target_node_count': {'readonly': True}, - 'node_state_counts': {'readonly': True}, - } - - _attribute_map = { - 'os_type': {'key': 'osType', 'type': 'str'}, - 'vm_size': {'key': 'vmSize', 'type': 'str'}, - 'vm_priority': {'key': 'vmPriority', 'type': 'str'}, - 'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'VirtualMachineImage'}, - 'isolated_network': {'key': 'isolatedNetwork', 'type': 'bool'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, - 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'}, - 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, - 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'}, - 'allocation_state': {'key': 'allocationState', 'type': 'str'}, - 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'}, - 'errors': {'key': 'errors', 'type': '[ErrorResponse]'}, - 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'}, - 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'}, - 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'}, - 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'}, - 'property_bag': {'key': 'propertyBag', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value: - "Linux". - :paramtype os_type: str or ~azure.mgmt.machinelearningservices.models.OsType - :keyword vm_size: Virtual Machine Size. - :paramtype vm_size: str - :keyword vm_priority: Virtual Machine priority. Possible values include: "Dedicated", - "LowPriority". - :paramtype vm_priority: str or ~azure.mgmt.machinelearningservices.models.VmPriority - :keyword virtual_machine_image: Virtual Machine image for AML Compute - windows only. - :paramtype virtual_machine_image: - ~azure.mgmt.machinelearningservices.models.VirtualMachineImage - :keyword isolated_network: Network is isolated or not. - :paramtype isolated_network: bool - :keyword scale_settings: Scale settings for AML Compute. - :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings - :keyword user_account_credentials: Credentials for an administrator user account that will be - created on each compute node. - :paramtype user_account_credentials: - ~azure.mgmt.machinelearningservices.models.UserAccountCredentials - :keyword subnet: Virtual network subnet resource ID the compute nodes belong to. - :paramtype subnet: ~azure.mgmt.machinelearningservices.models.ResourceId - :keyword remote_login_port_public_access: State of the public SSH port. Possible values are: - Disabled - Indicates that the public ssh port is closed on all nodes of the cluster. Enabled - - Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified - - Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined, - else is open all public nodes. It can be default only during cluster creation time, after - creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled", - "NotSpecified". Default value: "NotSpecified". 
- :paramtype remote_login_port_public_access: str or - ~azure.mgmt.machinelearningservices.models.RemoteLoginPortPublicAccess - :keyword enable_node_public_ip: Enable or disable node public IP address provisioning. Possible - values are: Possible values are: true - Indicates that the compute nodes will have public IPs - provisioned. false - Indicates that the compute nodes will have a private endpoint and no - public IPs. - :paramtype enable_node_public_ip: bool - :keyword property_bag: A property bag containing additional properties. - :paramtype property_bag: any - """ - super(AmlComputeProperties, self).__init__(**kwargs) - self.os_type = kwargs.get('os_type', "Linux") - self.vm_size = kwargs.get('vm_size', None) - self.vm_priority = kwargs.get('vm_priority', None) - self.virtual_machine_image = kwargs.get('virtual_machine_image', None) - self.isolated_network = kwargs.get('isolated_network', None) - self.scale_settings = kwargs.get('scale_settings', None) - self.user_account_credentials = kwargs.get('user_account_credentials', None) - self.subnet = kwargs.get('subnet', None) - self.remote_login_port_public_access = kwargs.get('remote_login_port_public_access', "NotSpecified") - self.allocation_state = None - self.allocation_state_transition_time = None - self.errors = None - self.current_node_count = None - self.target_node_count = None - self.node_state_counts = None - self.enable_node_public_ip = kwargs.get('enable_node_public_ip', True) - self.property_bag = kwargs.get('property_bag', None) - - -class AmlOperation(msrest.serialization.Model): - """Azure Machine Learning team account REST API operation. - - :ivar display: Gets or sets display name of operation. - :vartype display: ~azure.mgmt.machinelearningservices.models.OperationDisplay - :ivar is_data_action: Indicates whether the operation applies to data-plane. - :vartype is_data_action: bool - :ivar name: Gets or sets operation name: {provider}/{resource}/{operation}. - :vartype name: str - :ivar origin: The intended executor of the operation: user/system. - :vartype origin: str - """ - - _attribute_map = { - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, - 'name': {'key': 'name', 'type': 'str'}, - 'origin': {'key': 'origin', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword display: Gets or sets display name of operation. - :paramtype display: ~azure.mgmt.machinelearningservices.models.OperationDisplay - :keyword is_data_action: Indicates whether the operation applies to data-plane. - :paramtype is_data_action: bool - :keyword name: Gets or sets operation name: {provider}/{resource}/{operation}. - :paramtype name: str - :keyword origin: The intended executor of the operation: user/system. - :paramtype origin: str - """ - super(AmlOperation, self).__init__(**kwargs) - self.display = kwargs.get('display', None) - self.is_data_action = kwargs.get('is_data_action', None) - self.name = kwargs.get('name', None) - self.origin = kwargs.get('origin', None) - - -class AmlOperationListResult(msrest.serialization.Model): - """An array of operations supported by the resource provider. - - :ivar value: Gets or sets list of AML team account operations supported by the - AML team account resource provider. 
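A comparable sketch for AmlCompute, again limited to keyword arguments documented above (import path assumed; scale_settings and other nested models defined outside this hunk are omitted).

    from azure.mgmt.machinelearningservices.models import AmlCompute, AmlComputeProperties

    # Server-populated fields (allocation_state, errors, node counts, ...) are
    # read-only and therefore not set here; scale_settings is omitted because its
    # model is defined outside this hunk.
    aml_properties = AmlComputeProperties(
        os_type="Linux",
        vm_size="STANDARD_DS3_V2",             # example size only
        vm_priority="Dedicated",
        remote_login_port_public_access="Disabled",
        enable_node_public_ip=False,
    )

    aml_compute = AmlCompute(
        properties=aml_properties,
        description="Training cluster",
        disable_local_auth=True,
    )
    # aml_compute.compute_type == "AmlCompute" is set by the constructor.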
- :vartype value: list[~azure.mgmt.machinelearningservices.models.AmlOperation] - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[AmlOperation]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword value: Gets or sets list of AML team account operations supported by the - AML team account resource provider. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.AmlOperation] - """ - super(AmlOperationListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class IdentityConfiguration(msrest.serialization.Model): - """Base definition for identity configuration. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmlToken, ManagedIdentity, UserIdentity. - - All required parameters must be populated in order to send to Azure. - - :ivar identity_type: Required. [Required] Specifies the type of identity framework.Constant - filled by server. Possible values include: "Managed", "AMLToken", "UserIdentity". - :vartype identity_type: str or - ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType - """ - - _validation = { - 'identity_type': {'required': True}, - } - - _attribute_map = { - 'identity_type': {'key': 'identityType', 'type': 'str'}, - } - - _subtype_map = { - 'identity_type': {'AMLToken': 'AmlToken', 'Managed': 'ManagedIdentity', 'UserIdentity': 'UserIdentity'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(IdentityConfiguration, self).__init__(**kwargs) - self.identity_type = None # type: Optional[str] - - -class AmlToken(IdentityConfiguration): - """AML Token identity configuration. - - All required parameters must be populated in order to send to Azure. - - :ivar identity_type: Required. [Required] Specifies the type of identity framework.Constant - filled by server. Possible values include: "Managed", "AMLToken", "UserIdentity". - :vartype identity_type: str or - ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType - """ - - _validation = { - 'identity_type': {'required': True}, - } - - _attribute_map = { - 'identity_type': {'key': 'identityType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(AmlToken, self).__init__(**kwargs) - self.identity_type = 'AMLToken' # type: str - - -class MonitorComputeIdentityBase(msrest.serialization.Model): - """Monitor compute identity base definition. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmlTokenComputeIdentity, ManagedComputeIdentity. - - All required parameters must be populated in order to send to Azure. - - :ivar compute_identity_type: Required. [Required] Monitor compute identity type enum.Constant - filled by server. Possible values include: "AmlToken", "ManagedIdentity". - :vartype compute_identity_type: str or - ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityType - """ - - _validation = { - 'compute_identity_type': {'required': True}, - } - - _attribute_map = { - 'compute_identity_type': {'key': 'computeIdentityType', 'type': 'str'}, - } - - _subtype_map = { - 'compute_identity_type': {'AmlToken': 'AmlTokenComputeIdentity', 'ManagedIdentity': 'ManagedComputeIdentity'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(MonitorComputeIdentityBase, self).__init__(**kwargs) - self.compute_identity_type = None # type: Optional[str] - - -class AmlTokenComputeIdentity(MonitorComputeIdentityBase): - """AML token compute identity definition. 
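The discriminator pattern used throughout these models is easiest to see on AmlToken, which carries no payload of its own. A small sketch under the same import-path assumption:

    from azure.mgmt.machinelearningservices.models import AmlToken, IdentityConfiguration

    identity = AmlToken()                      # no arguments; discriminator only
    assert isinstance(identity, IdentityConfiguration)
    assert identity.identity_type == "AMLToken"

    # On deserialization, msrest consults _subtype_map on the base class, so a
    # payload whose identityType is "AMLToken" comes back as an AmlToken instance.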
- - All required parameters must be populated in order to send to Azure. - - :ivar compute_identity_type: Required. [Required] Monitor compute identity type enum.Constant - filled by server. Possible values include: "AmlToken", "ManagedIdentity". - :vartype compute_identity_type: str or - ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityType - """ - - _validation = { - 'compute_identity_type': {'required': True}, - } - - _attribute_map = { - 'compute_identity_type': {'key': 'computeIdentityType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(AmlTokenComputeIdentity, self).__init__(**kwargs) - self.compute_identity_type = 'AmlToken' # type: str - - -class AmlUserFeature(msrest.serialization.Model): - """Features enabled for a workspace. - - :ivar id: Specifies the feature ID. - :vartype id: str - :ivar display_name: Specifies the feature name. - :vartype display_name: str - :ivar description: Describes the feature for user experience. - :vartype description: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword id: Specifies the feature ID. - :paramtype id: str - :keyword display_name: Specifies the feature name. - :paramtype display_name: str - :keyword description: Describes the feature for user experience. - :paramtype description: str - """ - super(AmlUserFeature, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.display_name = kwargs.get('display_name', None) - self.description = kwargs.get('description', None) - - -class ApiKeyAuthWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): - """This connection type covers the generic ApiKey auth connection categories, for examples: -AzureOpenAI: - Category:= AzureOpenAI - AuthType:= ApiKey (as type discriminator) - Credentials:= {ApiKey} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey - Target:= {ApiBase} - -CognitiveService: - Category:= CognitiveService - AuthType:= ApiKey (as type discriminator) - Credentials:= {SubscriptionKey} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey - Target:= ServiceRegion={serviceRegion} - -CognitiveSearch: - Category:= CognitiveSearch - AuthType:= ApiKey (as type discriminator) - Credentials:= {Key} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey - Target:= {Endpoint} - -Use Metadata property bag for ApiType, ApiVersion, Kind and other metadata fields. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar expiry_time: - :vartype expiry_time: ~datetime.datetime - :ivar metadata: Any object. 
- :vartype metadata: any - :ivar target: - :vartype target: str - :ivar credentials: Api key object for workspace connection credential. - :vartype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionApiKey - """ - - _validation = { - 'auth_type': {'required': True}, - } - - _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionApiKey'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :keyword expiry_time: - :paramtype expiry_time: ~datetime.datetime - :keyword metadata: Any object. - :paramtype metadata: any - :keyword target: - :paramtype target: str - :keyword credentials: Api key object for workspace connection credential. - :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionApiKey - """ - super(ApiKeyAuthWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'ApiKey' # type: str - self.credentials = kwargs.get('credentials', None) - - -class ArmResourceId(msrest.serialization.Model): - """ARM ResourceId of a resource. - - :ivar resource_id: Arm ResourceId is in the format - "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.Storage/storageAccounts/{StorageAccountName}" - or - "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{AcrName}". - :vartype resource_id: str - """ - - _attribute_map = { - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword resource_id: Arm ResourceId is in the format - "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.Storage/storageAccounts/{StorageAccountName}" - or - "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{AcrName}". - :paramtype resource_id: str - """ - super(ArmResourceId, self).__init__(**kwargs) - self.resource_id = kwargs.get('resource_id', None) - - -class ResourceBase(msrest.serialization.Model): - """ResourceBase. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - """ - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. 
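Following the category mapping spelled out in the ApiKeyAuthWorkspaceConnectionProperties docstring, an AzureOpenAI-style connection might be built as below. This is a sketch: the import path is assumed, the endpoint, key, and metadata values are placeholders, and WorkspaceConnectionApiKey taking a 'key' keyword is an assumption since that model is defined outside this hunk.

    from azure.mgmt.machinelearningservices.models import (
        ApiKeyAuthWorkspaceConnectionProperties,
        WorkspaceConnectionApiKey,
    )

    # Assumption: WorkspaceConnectionApiKey accepts a 'key' keyword (not shown here).
    connection = ApiKeyAuthWorkspaceConnectionProperties(
        category="AzureOpenAI",
        target="https://<resource-name>.openai.azure.com/",   # placeholder ApiBase
        credentials=WorkspaceConnectionApiKey(key="<api-key>"),
        metadata={"ApiType": "Azure", "ApiVersion": "<api-version>"},  # per docstring
    )
    # auth_type is fixed to "ApiKey" by the constructor.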
- :paramtype tags: dict[str, str] - """ - super(ResourceBase, self).__init__(**kwargs) - self.description = kwargs.get('description', None) - self.properties = kwargs.get('properties', None) - self.tags = kwargs.get('tags', None) - - -class AssetBase(ResourceBase): - """AssetBase. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :vartype is_archived: bool - """ - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :paramtype is_archived: bool - """ - super(AssetBase, self).__init__(**kwargs) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.is_anonymous = kwargs.get('is_anonymous', False) - self.is_archived = kwargs.get('is_archived', False) - - -class AssetContainer(ResourceBase): - """AssetContainer. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar latest_version: The latest version inside this container. - :vartype latest_version: str - :ivar next_version: The next auto incremental version. 
- :vartype next_version: str - """ - - _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - """ - super(AssetContainer, self).__init__(**kwargs) - self.is_archived = kwargs.get('is_archived', False) - self.latest_version = None - self.next_version = None - - -class AssetJobInput(msrest.serialization.Model): - """Asset input type. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str - """ - - _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str - """ - super(AssetJobInput, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.uri = kwargs['uri'] - - -class AssetJobOutput(msrest.serialization.Model): - """Asset output type. - - :ivar asset_name: Output Asset Name. - :vartype asset_name: str - :ivar asset_version: Output Asset Version. - :vartype asset_version: str - :ivar auto_delete_setting: Auto delete setting of output data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", - "Direct". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :ivar uri: Output Asset URI. - :vartype uri: str - """ - - _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword asset_name: Output Asset Name. - :paramtype asset_name: str - :keyword asset_version: Output Asset Version. - :paramtype asset_version: str - :keyword auto_delete_setting: Auto delete setting of output data asset. 
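AssetContainer and AssetJobInput above illustrate two more conventions: readonly fields (latest_version, next_version) are forced to None locally and only ever populated by the service, while [Required] fields are read with kwargs['...'], so omitting them fails immediately at construction time. A sketch under the same import-path assumption; note that the keyword-argument _models_py3 variants kept by this patch surface the same mistake as a TypeError rather than a KeyError.

from azure.ai.ml._restclient.v2023_08_01_preview.models import AssetJobInput

ok = AssetJobInput(
    uri="azureml://datastores/workspaceblobstore/paths/data/train.csv",  # placeholder URI
    mode="ReadOnlyMount",
)
print(ok.serialize())  # {'mode': 'ReadOnlyMount', 'uri': 'azureml://...'}

try:
    AssetJobInput(mode="Download")  # 'uri' is [Required]; __init__ does kwargs['uri']
except KeyError as exc:
    print("missing required field:", exc)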
- :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", - "Direct". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :keyword uri: Output Asset URI. - :paramtype uri: str - """ - super(AssetJobOutput, self).__init__(**kwargs) - self.asset_name = kwargs.get('asset_name', None) - self.asset_version = kwargs.get('asset_version', None) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.mode = kwargs.get('mode', None) - self.uri = kwargs.get('uri', None) - - -class AssetReferenceBase(msrest.serialization.Model): - """Base definition for asset references. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DataPathAssetReference, IdAssetReference, OutputPathAssetReference. - - All required parameters must be populated in order to send to Azure. - - :ivar reference_type: Required. [Required] Specifies the type of asset reference.Constant - filled by server. Possible values include: "Id", "DataPath", "OutputPath". - :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType - """ - - _validation = { - 'reference_type': {'required': True}, - } - - _attribute_map = { - 'reference_type': {'key': 'referenceType', 'type': 'str'}, - } - - _subtype_map = { - 'reference_type': {'DataPath': 'DataPathAssetReference', 'Id': 'IdAssetReference', 'OutputPath': 'OutputPathAssetReference'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(AssetReferenceBase, self).__init__(**kwargs) - self.reference_type = None # type: Optional[str] - - -class AssignedUser(msrest.serialization.Model): - """A user that can be assigned to a compute instance. - - All required parameters must be populated in order to send to Azure. - - :ivar object_id: Required. User’s AAD Object Id. - :vartype object_id: str - :ivar tenant_id: Required. User’s AAD Tenant Id. - :vartype tenant_id: str - """ - - _validation = { - 'object_id': {'required': True}, - 'tenant_id': {'required': True}, - } - - _attribute_map = { - 'object_id': {'key': 'objectId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword object_id: Required. User’s AAD Object Id. - :paramtype object_id: str - :keyword tenant_id: Required. User’s AAD Tenant Id. - :paramtype tenant_id: str - """ - super(AssignedUser, self).__init__(**kwargs) - self.object_id = kwargs['object_id'] - self.tenant_id = kwargs['tenant_id'] - - -class AutoDeleteSetting(msrest.serialization.Model): - """AutoDeleteSetting. - - :ivar condition: When to check if an asset is expired. Possible values include: - "CreatedGreaterThan", "LastAccessedGreaterThan". - :vartype condition: str or ~azure.mgmt.machinelearningservices.models.AutoDeleteCondition - :ivar value: Expiration condition value. - :vartype value: str - """ - - _attribute_map = { - 'condition': {'key': 'condition', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword condition: When to check if an asset is expired. Possible values include: - "CreatedGreaterThan", "LastAccessedGreaterThan". - :paramtype condition: str or ~azure.mgmt.machinelearningservices.models.AutoDeleteCondition - :keyword value: Expiration condition value. 
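AssetReferenceBase above is the standard msrest polymorphic base: the reference_type discriminator is required, _subtype_map maps each wire value to a subclass name, the base constructor leaves the discriminator as None, and each concrete subclass pins it to its constant. The standalone sketch below reproduces that pattern without depending on the SDK (only msrest is assumed installed); IdReference is a simplified stand-in for IdAssetReference, not the real class.

import msrest.serialization


class ReferenceBase(msrest.serialization.Model):
    """Polymorphic base: reference_type is the discriminator."""

    _validation = {"reference_type": {"required": True}}
    _attribute_map = {"reference_type": {"key": "referenceType", "type": "str"}}
    _subtype_map = {"reference_type": {"Id": "IdReference"}}

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.reference_type = None  # filled in by the concrete subclass


class IdReference(ReferenceBase):
    """Concrete subclass: pins the discriminator and adds its own field."""

    _validation = {"reference_type": {"required": True}, "asset_id": {"required": True}}
    _attribute_map = {
        "reference_type": {"key": "referenceType", "type": "str"},
        "asset_id": {"key": "assetId", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.reference_type = "Id"
        self.asset_id = kwargs["asset_id"]


ref = IdReference(asset_id="azureml://registries/example-registry/models/example-model/versions/1")
print(ref.serialize())  # {'referenceType': 'Id', 'assetId': 'azureml://...'}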
- :paramtype value: str - """ - super(AutoDeleteSetting, self).__init__(**kwargs) - self.condition = kwargs.get('condition', None) - self.value = kwargs.get('value', None) - - -class ForecastHorizon(msrest.serialization.Model): - """The desired maximum forecast horizon in units of time-series frequency. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AutoForecastHorizon, CustomForecastHorizon. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] Set forecast horizon value selection mode.Constant filled by - server. Possible values include: "Auto", "Custom". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.ForecastHorizonMode - """ - - _validation = { - 'mode': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - } - - _subtype_map = { - 'mode': {'Auto': 'AutoForecastHorizon', 'Custom': 'CustomForecastHorizon'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ForecastHorizon, self).__init__(**kwargs) - self.mode = None # type: Optional[str] - - -class AutoForecastHorizon(ForecastHorizon): - """Forecast horizon determined automatically by system. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] Set forecast horizon value selection mode.Constant filled by - server. Possible values include: "Auto", "Custom". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.ForecastHorizonMode - """ - - _validation = { - 'mode': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(AutoForecastHorizon, self).__init__(**kwargs) - self.mode = 'Auto' # type: str - - -class AutologgerSettings(msrest.serialization.Model): - """Settings for Autologger. - - All required parameters must be populated in order to send to Azure. - - :ivar mlflow_autologger: Required. [Required] Indicates whether mlflow autologger is enabled. - Possible values include: "Enabled", "Disabled". - :vartype mlflow_autologger: str or - ~azure.mgmt.machinelearningservices.models.MLFlowAutologgerState - """ - - _validation = { - 'mlflow_autologger': {'required': True}, - } - - _attribute_map = { - 'mlflow_autologger': {'key': 'mlflowAutologger', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mlflow_autologger: Required. [Required] Indicates whether mlflow autologger is - enabled. Possible values include: "Enabled", "Disabled". - :paramtype mlflow_autologger: str or - ~azure.mgmt.machinelearningservices.models.MLFlowAutologgerState - """ - super(AutologgerSettings, self).__init__(**kwargs) - self.mlflow_autologger = kwargs['mlflow_autologger'] - - -class JobBaseProperties(ResourceBase): - """Base definition for a job. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AutoMLJob, CommandJob, LabelingJobProperties, PipelineJob, SparkJob, SweepJob. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. 
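AutoDeleteSetting above is normally nested inside an asset rather than sent on its own; because AssetBase declares it with 'type': 'AutoDeleteSetting' in its _attribute_map, serialization recurses into the nested model. A sketch under the same import-path assumption; the condition string comes from the enum values listed above, while the value string is purely illustrative.

from azure.ai.ml._restclient.v2023_08_01_preview.models import AssetBase, AutoDeleteSetting

setting = AutoDeleteSetting(condition="CreatedGreaterThan", value="30.00:00:00")  # value is a placeholder
asset = AssetBase(description="scratch output", auto_delete_setting=setting)
print(asset.serialize())
# expected shape:
# {'description': 'scratch output',
#  'autoDeleteSetting': {'condition': 'CreatedGreaterThan', 'value': '30.00:00:00'},
#  'isAnonymous': False, 'isArchived': False}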
- :vartype tags: dict[str, str] - :ivar component_id: ARM resource ID of the component resource. - :vartype component_id: str - :ivar compute_id: ARM resource ID of the compute resource. - :vartype compute_id: str - :ivar display_name: Display name of job. - :vartype display_name: str - :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. - :vartype experiment_name: str - :ivar identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. - :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". - :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType - :ivar notification_setting: Notification setting for the job. - :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :ivar secrets_configuration: Configuration for secrets to be made available during runtime. - :vartype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :ivar services: List of JobEndpoints. - For local jobs, a job endpoint will have an endpoint value of FileStreamObject. - :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus - """ - - _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - _subtype_map = { - 'job_type': {'AutoML': 'AutoMLJob', 'Command': 'CommandJob', 'Labeling': 'LabelingJobProperties', 'Pipeline': 'PipelineJob', 'Spark': 'SparkJob', 'Sweep': 'SweepJob'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword component_id: ARM resource ID of the component resource. 
- :paramtype component_id: str - :keyword compute_id: ARM resource ID of the compute resource. - :paramtype compute_id: str - :keyword display_name: Display name of job. - :paramtype display_name: str - :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. - :paramtype experiment_name: str - :keyword identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. - :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - :keyword notification_setting: Notification setting for the job. - :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :keyword secrets_configuration: Configuration for secrets to be made available during runtime. - :paramtype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :keyword services: List of JobEndpoints. - For local jobs, a job endpoint will have an endpoint value of FileStreamObject. - :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - """ - super(JobBaseProperties, self).__init__(**kwargs) - self.component_id = kwargs.get('component_id', None) - self.compute_id = kwargs.get('compute_id', None) - self.display_name = kwargs.get('display_name', None) - self.experiment_name = kwargs.get('experiment_name', "Default") - self.identity = kwargs.get('identity', None) - self.is_archived = kwargs.get('is_archived', False) - self.job_type = 'JobBaseProperties' # type: str - self.notification_setting = kwargs.get('notification_setting', None) - self.secrets_configuration = kwargs.get('secrets_configuration', None) - self.services = kwargs.get('services', None) - self.status = None - - -class AutoMLJob(JobBaseProperties): - """AutoMLJob class. -Use this class for executing AutoML tasks like Classification/Regression etc. -See TaskType enum for all the tasks supported. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar component_id: ARM resource ID of the component resource. - :vartype component_id: str - :ivar compute_id: ARM resource ID of the compute resource. - :vartype compute_id: str - :ivar display_name: Display name of job. - :vartype display_name: str - :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. - :vartype experiment_name: str - :ivar identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. - :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". 
- :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType - :ivar notification_setting: Notification setting for the job. - :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :ivar secrets_configuration: Configuration for secrets to be made available during runtime. - :vartype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :ivar services: List of JobEndpoints. - For local jobs, a job endpoint will have an endpoint value of FileStreamObject. - :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus - :ivar environment_id: The ARM resource ID of the Environment specification for the job. - This is optional value to provide, if not provided, AutoML will default this to Production - AutoML curated environment version when running the job. - :vartype environment_id: str - :ivar environment_variables: Environment variables included in the job. - :vartype environment_variables: dict[str, str] - :ivar outputs: Mapping of output data bindings used in the job. - :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] - :ivar queue_settings: Queue settings for the job. - :vartype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings - :ivar resources: Compute Resource configuration for the job. - :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration - :ivar task_details: Required. [Required] This represents scenario which can be one of - Tables/NLP/Image. - :vartype task_details: ~azure.mgmt.machinelearningservices.models.AutoMLVertical - """ - - _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'task_details': {'required': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'queue_settings': {'key': 'queueSettings', 'type': 'QueueSettings'}, - 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, - 'task_details': {'key': 'taskDetails', 'type': 'AutoMLVertical'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. 
- :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword component_id: ARM resource ID of the component resource. - :paramtype component_id: str - :keyword compute_id: ARM resource ID of the compute resource. - :paramtype compute_id: str - :keyword display_name: Display name of job. - :paramtype display_name: str - :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. - :paramtype experiment_name: str - :keyword identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. - :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - :keyword notification_setting: Notification setting for the job. - :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :keyword secrets_configuration: Configuration for secrets to be made available during runtime. - :paramtype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :keyword services: List of JobEndpoints. - For local jobs, a job endpoint will have an endpoint value of FileStreamObject. - :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :keyword environment_id: The ARM resource ID of the Environment specification for the job. - This is optional value to provide, if not provided, AutoML will default this to Production - AutoML curated environment version when running the job. - :paramtype environment_id: str - :keyword environment_variables: Environment variables included in the job. - :paramtype environment_variables: dict[str, str] - :keyword outputs: Mapping of output data bindings used in the job. - :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] - :keyword queue_settings: Queue settings for the job. - :paramtype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings - :keyword resources: Compute Resource configuration for the job. - :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration - :keyword task_details: Required. [Required] This represents scenario which can be one of - Tables/NLP/Image. - :paramtype task_details: ~azure.mgmt.machinelearningservices.models.AutoMLVertical - """ - super(AutoMLJob, self).__init__(**kwargs) - self.job_type = 'AutoML' # type: str - self.environment_id = kwargs.get('environment_id', None) - self.environment_variables = kwargs.get('environment_variables', None) - self.outputs = kwargs.get('outputs', None) - self.queue_settings = kwargs.get('queue_settings', None) - self.resources = kwargs.get('resources', None) - self.task_details = kwargs['task_details'] - - -class AutoMLVertical(msrest.serialization.Model): - """AutoML vertical class. -Base class for AutoML verticals - TableVertical/ImageVertical/NLPVertical. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: Classification, Forecasting, ImageClassification, ImageClassificationMultilabel, ImageInstanceSegmentation, ImageObjectDetection, Regression, TextClassification, TextClassificationMultilabel, TextNer. 
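AutoMLJob above combines the JobBaseProperties defaults with one hard requirement, task_details. A sketch of the minimal construction follows; Classification is confirmed by the subtype map above, but the assumption that MLTableJobInput accepts a uri keyword (like the other job-input models in this file) is mine, as is the azureml: data reference.

from azure.ai.ml._restclient.v2023_08_01_preview.models import (
    AutoMLJob,
    Classification,    # one of the AutoMLVertical subclasses listed in the subtype map below
    MLTableJobInput,   # assumed to take `uri`, like the other job-input models in this file
)

task = Classification(training_data=MLTableJobInput(uri="azureml:train_data:1"))
job = AutoMLJob(task_details=task)

print(job.job_type)         # 'AutoML'   -- constant pinned by the subclass
print(job.experiment_name)  # 'Default'  -- kwargs.get default from JobBaseProperties
print(job.status)           # None       -- readonly, populated by the service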
- - All required parameters must be populated in order to send to Azure. - - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - """ - - _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, - } - - _attribute_map = { - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - } - - _subtype_map = { - 'task_type': {'Classification': 'Classification', 'Forecasting': 'Forecasting', 'ImageClassification': 'ImageClassification', 'ImageClassificationMultilabel': 'ImageClassificationMultilabel', 'ImageInstanceSegmentation': 'ImageInstanceSegmentation', 'ImageObjectDetection': 'ImageObjectDetection', 'Regression': 'Regression', 'TextClassification': 'TextClassification', 'TextClassificationMultilabel': 'TextClassificationMultilabel', 'TextNER': 'TextNer'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - """ - super(AutoMLVertical, self).__init__(**kwargs) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.task_type = None # type: Optional[str] - self.training_data = kwargs['training_data'] - - -class NCrossValidations(msrest.serialization.Model): - """N-Cross validations value. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AutoNCrossValidations, CustomNCrossValidations. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] Mode for determining N-Cross validations.Constant filled by - server. Possible values include: "Auto", "Custom". 
- :vartype mode: str or ~azure.mgmt.machinelearningservices.models.NCrossValidationsMode - """ - - _validation = { - 'mode': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - } - - _subtype_map = { - 'mode': {'Auto': 'AutoNCrossValidations', 'Custom': 'CustomNCrossValidations'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(NCrossValidations, self).__init__(**kwargs) - self.mode = None # type: Optional[str] - - -class AutoNCrossValidations(NCrossValidations): - """N-Cross validations determined automatically. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] Mode for determining N-Cross validations.Constant filled by - server. Possible values include: "Auto", "Custom". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.NCrossValidationsMode - """ - - _validation = { - 'mode': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(AutoNCrossValidations, self).__init__(**kwargs) - self.mode = 'Auto' # type: str - - -class AutoPauseProperties(msrest.serialization.Model): - """Auto pause properties. - - :ivar delay_in_minutes: - :vartype delay_in_minutes: int - :ivar enabled: - :vartype enabled: bool - """ - - _attribute_map = { - 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'}, - 'enabled': {'key': 'enabled', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword delay_in_minutes: - :paramtype delay_in_minutes: int - :keyword enabled: - :paramtype enabled: bool - """ - super(AutoPauseProperties, self).__init__(**kwargs) - self.delay_in_minutes = kwargs.get('delay_in_minutes', None) - self.enabled = kwargs.get('enabled', None) - - -class AutoScaleProperties(msrest.serialization.Model): - """Auto scale properties. - - :ivar min_node_count: - :vartype min_node_count: int - :ivar enabled: - :vartype enabled: bool - :ivar max_node_count: - :vartype max_node_count: int - """ - - _attribute_map = { - 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, - 'enabled': {'key': 'enabled', 'type': 'bool'}, - 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword min_node_count: - :paramtype min_node_count: int - :keyword enabled: - :paramtype enabled: bool - :keyword max_node_count: - :paramtype max_node_count: int - """ - super(AutoScaleProperties, self).__init__(**kwargs) - self.min_node_count = kwargs.get('min_node_count', None) - self.enabled = kwargs.get('enabled', None) - self.max_node_count = kwargs.get('max_node_count', None) - - -class Seasonality(msrest.serialization.Model): - """Forecasting seasonality. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AutoSeasonality, CustomSeasonality. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] Seasonality mode.Constant filled by server. Possible values - include: "Auto", "Custom". 
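AutoPauseProperties and AutoScaleProperties above are small, flat configuration payloads; serializing them shows the snake_case-to-camelCase key mapping directly. Same import-path assumption as the earlier sketches.

from azure.ai.ml._restclient.v2023_08_01_preview.models import (
    AutoPauseProperties,
    AutoScaleProperties,
)

auto_pause = AutoPauseProperties(delay_in_minutes=15, enabled=True)
auto_scale = AutoScaleProperties(min_node_count=1, max_node_count=4, enabled=True)

print(auto_pause.serialize())  # {'delayInMinutes': 15, 'enabled': True}
print(auto_scale.serialize())  # {'minNodeCount': 1, 'enabled': True, 'maxNodeCount': 4}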
- :vartype mode: str or ~azure.mgmt.machinelearningservices.models.SeasonalityMode - """ - - _validation = { - 'mode': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - } - - _subtype_map = { - 'mode': {'Auto': 'AutoSeasonality', 'Custom': 'CustomSeasonality'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(Seasonality, self).__init__(**kwargs) - self.mode = None # type: Optional[str] - - -class AutoSeasonality(Seasonality): - """AutoSeasonality. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] Seasonality mode.Constant filled by server. Possible values - include: "Auto", "Custom". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.SeasonalityMode - """ - - _validation = { - 'mode': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(AutoSeasonality, self).__init__(**kwargs) - self.mode = 'Auto' # type: str - - -class TargetLags(msrest.serialization.Model): - """The number of past periods to lag from the target column. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AutoTargetLags, CustomTargetLags. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] Set target lags mode - Auto/Custom.Constant filled by server. - Possible values include: "Auto", "Custom". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetLagsMode - """ - - _validation = { - 'mode': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - } - - _subtype_map = { - 'mode': {'Auto': 'AutoTargetLags', 'Custom': 'CustomTargetLags'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(TargetLags, self).__init__(**kwargs) - self.mode = None # type: Optional[str] - - -class AutoTargetLags(TargetLags): - """AutoTargetLags. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] Set target lags mode - Auto/Custom.Constant filled by server. - Possible values include: "Auto", "Custom". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetLagsMode - """ - - _validation = { - 'mode': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(AutoTargetLags, self).__init__(**kwargs) - self.mode = 'Auto' # type: str - - -class TargetRollingWindowSize(msrest.serialization.Model): - """Forecasting target rolling window size. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AutoTargetRollingWindowSize, CustomTargetRollingWindowSize. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] TargetRollingWindowSiz detection mode.Constant filled by - server. Possible values include: "Auto", "Custom". 
- :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSizeMode - """ - - _validation = { - 'mode': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - } - - _subtype_map = { - 'mode': {'Auto': 'AutoTargetRollingWindowSize', 'Custom': 'CustomTargetRollingWindowSize'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(TargetRollingWindowSize, self).__init__(**kwargs) - self.mode = None # type: Optional[str] - - -class AutoTargetRollingWindowSize(TargetRollingWindowSize): - """Target lags rolling window determined automatically. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] TargetRollingWindowSiz detection mode.Constant filled by - server. Possible values include: "Auto", "Custom". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSizeMode - """ - - _validation = { - 'mode': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(AutoTargetRollingWindowSize, self).__init__(**kwargs) - self.mode = 'Auto' # type: str - - -class MonitoringAlertNotificationSettingsBase(msrest.serialization.Model): - """MonitoringAlertNotificationSettingsBase. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzMonMonitoringAlertNotificationSettings, EmailMonitoringAlertNotificationSettings. - - All required parameters must be populated in order to send to Azure. - - :ivar alert_notification_type: Required. [Required] Specifies the type of signal to - monitor.Constant filled by server. Possible values include: "AzureMonitor", "Email". - :vartype alert_notification_type: str or - ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationType - """ - - _validation = { - 'alert_notification_type': {'required': True}, - } - - _attribute_map = { - 'alert_notification_type': {'key': 'alertNotificationType', 'type': 'str'}, - } - - _subtype_map = { - 'alert_notification_type': {'AzureMonitor': 'AzMonMonitoringAlertNotificationSettings', 'Email': 'EmailMonitoringAlertNotificationSettings'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(MonitoringAlertNotificationSettingsBase, self).__init__(**kwargs) - self.alert_notification_type = None # type: Optional[str] - - -class AzMonMonitoringAlertNotificationSettings(MonitoringAlertNotificationSettingsBase): - """AzMonMonitoringAlertNotificationSettings. - - All required parameters must be populated in order to send to Azure. - - :ivar alert_notification_type: Required. [Required] Specifies the type of signal to - monitor.Constant filled by server. Possible values include: "AzureMonitor", "Email". - :vartype alert_notification_type: str or - ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationType - """ - - _validation = { - 'alert_notification_type': {'required': True}, - } - - _attribute_map = { - 'alert_notification_type': {'key': 'alertNotificationType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(AzMonMonitoringAlertNotificationSettings, self).__init__(**kwargs) - self.alert_notification_type = 'AzureMonitor' # type: str - - -class AzureDatastore(msrest.serialization.Model): - """Base definition for Azure datastore contents configuration. - - :ivar resource_group: Azure Resource Group name. 
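Seasonality, TargetLags, and TargetRollingWindowSize (like ForecastHorizon and NCrossValidations earlier) all repeat one forecasting-knob pattern: an Auto variant that carries nothing but the mode discriminator, and a Custom variant with an explicit value. The sketch below covers only the Auto side, since the Custom payloads are not shown in this hunk; same import-path assumption.

from azure.ai.ml._restclient.v2023_08_01_preview.models import (
    AutoForecastHorizon,
    AutoNCrossValidations,
    AutoSeasonality,
    AutoTargetLags,
    AutoTargetRollingWindowSize,
)

# Each Auto* variant is an empty payload whose only job is to pin mode='Auto'.
for knob in (
    AutoForecastHorizon(),
    AutoNCrossValidations(),
    AutoSeasonality(),
    AutoTargetLags(),
    AutoTargetRollingWindowSize(),
):
    print(type(knob).__name__, knob.serialize())  # e.g. AutoSeasonality {'mode': 'Auto'}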
- :vartype resource_group: str - :ivar subscription_id: Azure Subscription Id. - :vartype subscription_id: str - """ - - _attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword resource_group: Azure Resource Group name. - :paramtype resource_group: str - :keyword subscription_id: Azure Subscription Id. - :paramtype subscription_id: str - """ - super(AzureDatastore, self).__init__(**kwargs) - self.resource_group = kwargs.get('resource_group', None) - self.subscription_id = kwargs.get('subscription_id', None) - - -class DatastoreProperties(ResourceBase): - """Base definition for datastore contents configuration. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureBlobDatastore, AzureDataLakeGen1Datastore, AzureDataLakeGen2Datastore, AzureFileDatastore, HdfsDatastore, OneLakeDatastore. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar credentials: Required. [Required] Account credentials. - :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled - by server. Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", - "AzureFile", "Hdfs", "OneLake". - :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType - :ivar intellectual_property: Intellectual Property details. - :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar is_default: Readonly property to indicate if datastore is the workspace default - datastore. - :vartype is_default: bool - """ - - _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - } - - _subtype_map = { - 'datastore_type': {'AzureBlob': 'AzureBlobDatastore', 'AzureDataLakeGen1': 'AzureDataLakeGen1Datastore', 'AzureDataLakeGen2': 'AzureDataLakeGen2Datastore', 'AzureFile': 'AzureFileDatastore', 'Hdfs': 'HdfsDatastore', 'OneLake': 'OneLakeDatastore'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword credentials: Required. [Required] Account credentials. 
- :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :keyword intellectual_property: Intellectual Property details. - :paramtype intellectual_property: - ~azure.mgmt.machinelearningservices.models.IntellectualProperty - """ - super(DatastoreProperties, self).__init__(**kwargs) - self.credentials = kwargs['credentials'] - self.datastore_type = 'DatastoreProperties' # type: str - self.intellectual_property = kwargs.get('intellectual_property', None) - self.is_default = None - - -class AzureBlobDatastore(DatastoreProperties, AzureDatastore): - """Azure Blob datastore configuration. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar resource_group: Azure Resource Group name. - :vartype resource_group: str - :ivar subscription_id: Azure Subscription Id. - :vartype subscription_id: str - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar credentials: Required. [Required] Account credentials. - :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled - by server. Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", - "AzureFile", "Hdfs", "OneLake". - :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType - :ivar intellectual_property: Intellectual Property details. - :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar is_default: Readonly property to indicate if datastore is the workspace default - datastore. - :vartype is_default: bool - :ivar account_name: Storage account name. - :vartype account_name: str - :ivar container_name: Storage account container name. - :vartype container_name: str - :ivar endpoint: Azure cloud endpoint for the storage account. - :vartype endpoint: str - :ivar protocol: Protocol used to communicate with the storage account. - :vartype protocol: str - :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". 
- :vartype service_data_access_auth_identity: str or - ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity - """ - - _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - } - - _attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'container_name': {'key': 'containerName', 'type': 'str'}, - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword resource_group: Azure Resource Group name. - :paramtype resource_group: str - :keyword subscription_id: Azure Subscription Id. - :paramtype subscription_id: str - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword credentials: Required. [Required] Account credentials. - :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :keyword intellectual_property: Intellectual Property details. - :paramtype intellectual_property: - ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword account_name: Storage account name. - :paramtype account_name: str - :keyword container_name: Storage account container name. - :paramtype container_name: str - :keyword endpoint: Azure cloud endpoint for the storage account. - :paramtype endpoint: str - :keyword protocol: Protocol used to communicate with the storage account. - :paramtype protocol: str - :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". 
- :paramtype service_data_access_auth_identity: str or - ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity - """ - super(AzureBlobDatastore, self).__init__(**kwargs) - self.resource_group = kwargs.get('resource_group', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.datastore_type = 'AzureBlob' # type: str - self.account_name = kwargs.get('account_name', None) - self.container_name = kwargs.get('container_name', None) - self.endpoint = kwargs.get('endpoint', None) - self.protocol = kwargs.get('protocol', None) - self.service_data_access_auth_identity = kwargs.get('service_data_access_auth_identity', None) - self.description = kwargs.get('description', None) - self.properties = kwargs.get('properties', None) - self.tags = kwargs.get('tags', None) - self.credentials = kwargs['credentials'] - self.intellectual_property = kwargs.get('intellectual_property', None) - self.is_default = None - - -class AzureDataLakeGen1Datastore(DatastoreProperties, AzureDatastore): - """Azure Data Lake Gen1 datastore configuration. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar resource_group: Azure Resource Group name. - :vartype resource_group: str - :ivar subscription_id: Azure Subscription Id. - :vartype subscription_id: str - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar credentials: Required. [Required] Account credentials. - :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled - by server. Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", - "AzureFile", "Hdfs", "OneLake". - :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType - :ivar intellectual_property: Intellectual Property details. - :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar is_default: Readonly property to indicate if datastore is the workspace default - datastore. - :vartype is_default: bool - :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". - :vartype service_data_access_auth_identity: str or - ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity - :ivar store_name: Required. [Required] Azure Data Lake store name. 
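AzureBlobDatastore above (like the other datastore classes) inherits from both DatastoreProperties and AzureDatastore, and its generated __init__ re-populates the fields of both branches from a single **kwargs dict. The standalone sketch below reproduces that shape with simplified stand-in classes; only msrest is assumed installed, and credentials is reduced to a plain string purely for brevity.

import msrest.serialization


class AzureDatastoreMixin(msrest.serialization.Model):
    """Stand-in for AzureDatastore: optional resource_group/subscription_id."""

    _attribute_map = {
        "resource_group": {"key": "resourceGroup", "type": "str"},
        "subscription_id": {"key": "subscriptionId", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.resource_group = kwargs.get("resource_group", None)
        self.subscription_id = kwargs.get("subscription_id", None)


class DatastoreBase(msrest.serialization.Model):
    """Stand-in for DatastoreProperties: required credentials plus discriminator."""

    _validation = {"credentials": {"required": True}}
    _attribute_map = {
        "credentials": {"key": "credentials", "type": "str"},  # simplified to a plain string
        "datastore_type": {"key": "datastoreType", "type": "str"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.credentials = kwargs["credentials"]
        self.datastore_type = None


class BlobDatastore(DatastoreBase, AzureDatastoreMixin):
    """Stand-in for AzureBlobDatastore(DatastoreProperties, AzureDatastore)."""

    _attribute_map = {
        **AzureDatastoreMixin._attribute_map,
        **DatastoreBase._attribute_map,
        "account_name": {"key": "accountName", "type": "str"},
    }

    def __init__(self, **kwargs):
        # One **kwargs dict feeds both bases via the MRO, exactly as in the
        # generated __init__ above; the subclass then pins the discriminator.
        super().__init__(**kwargs)
        self.datastore_type = "AzureBlob"
        self.account_name = kwargs.get("account_name", None)


store = BlobDatastore(credentials="<account-key>", account_name="examplestorage", resource_group="example-rg")
print(store.serialize())
# -> {'resourceGroup': 'example-rg', 'credentials': '<account-key>',
#     'datastoreType': 'AzureBlob', 'accountName': 'examplestorage'}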
- :vartype store_name: str - """ - - _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'store_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, - 'store_name': {'key': 'storeName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword resource_group: Azure Resource Group name. - :paramtype resource_group: str - :keyword subscription_id: Azure Subscription Id. - :paramtype subscription_id: str - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword credentials: Required. [Required] Account credentials. - :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :keyword intellectual_property: Intellectual Property details. - :paramtype intellectual_property: - ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". - :paramtype service_data_access_auth_identity: str or - ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity - :keyword store_name: Required. [Required] Azure Data Lake store name. - :paramtype store_name: str - """ - super(AzureDataLakeGen1Datastore, self).__init__(**kwargs) - self.resource_group = kwargs.get('resource_group', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.datastore_type = 'AzureDataLakeGen1' # type: str - self.service_data_access_auth_identity = kwargs.get('service_data_access_auth_identity', None) - self.store_name = kwargs['store_name'] - self.description = kwargs.get('description', None) - self.properties = kwargs.get('properties', None) - self.tags = kwargs.get('tags', None) - self.credentials = kwargs['credentials'] - self.intellectual_property = kwargs.get('intellectual_property', None) - self.is_default = None - - -class AzureDataLakeGen2Datastore(DatastoreProperties, AzureDatastore): - """Azure Data Lake Gen2 datastore configuration. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar resource_group: Azure Resource Group name. - :vartype resource_group: str - :ivar subscription_id: Azure Subscription Id. - :vartype subscription_id: str - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. 
- :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar credentials: Required. [Required] Account credentials. - :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled - by server. Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", - "AzureFile", "Hdfs", "OneLake". - :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType - :ivar intellectual_property: Intellectual Property details. - :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar is_default: Readonly property to indicate if datastore is the workspace default - datastore. - :vartype is_default: bool - :ivar account_name: Required. [Required] Storage account name. - :vartype account_name: str - :ivar endpoint: Azure cloud endpoint for the storage account. - :vartype endpoint: str - :ivar filesystem: Required. [Required] The name of the Data Lake Gen2 filesystem. - :vartype filesystem: str - :ivar protocol: Protocol used to communicate with the storage account. - :vartype protocol: str - :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". - :vartype service_data_access_auth_identity: str or - ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity - """ - - _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'account_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'filesystem': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'filesystem': {'key': 'filesystem', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword resource_group: Azure Resource Group name. - :paramtype resource_group: str - :keyword subscription_id: Azure Subscription Id. - :paramtype subscription_id: str - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword credentials: Required. [Required] Account credentials. 
- :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :keyword intellectual_property: Intellectual Property details. - :paramtype intellectual_property: - ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword account_name: Required. [Required] Storage account name. - :paramtype account_name: str - :keyword endpoint: Azure cloud endpoint for the storage account. - :paramtype endpoint: str - :keyword filesystem: Required. [Required] The name of the Data Lake Gen2 filesystem. - :paramtype filesystem: str - :keyword protocol: Protocol used to communicate with the storage account. - :paramtype protocol: str - :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". - :paramtype service_data_access_auth_identity: str or - ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity - """ - super(AzureDataLakeGen2Datastore, self).__init__(**kwargs) - self.resource_group = kwargs.get('resource_group', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.datastore_type = 'AzureDataLakeGen2' # type: str - self.account_name = kwargs['account_name'] - self.endpoint = kwargs.get('endpoint', None) - self.filesystem = kwargs['filesystem'] - self.protocol = kwargs.get('protocol', None) - self.service_data_access_auth_identity = kwargs.get('service_data_access_auth_identity', None) - self.description = kwargs.get('description', None) - self.properties = kwargs.get('properties', None) - self.tags = kwargs.get('tags', None) - self.credentials = kwargs['credentials'] - self.intellectual_property = kwargs.get('intellectual_property', None) - self.is_default = None - - -class Webhook(msrest.serialization.Model): - """Webhook base. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureDevOpsWebhook. - - All required parameters must be populated in order to send to Azure. - - :ivar event_type: Send callback on a specified notification event. - :vartype event_type: str - :ivar webhook_type: Required. [Required] Specifies the type of service to send a - callback.Constant filled by server. Possible values include: "AzureDevOps". - :vartype webhook_type: str or ~azure.mgmt.machinelearningservices.models.WebhookType - """ - - _validation = { - 'webhook_type': {'required': True}, - } - - _attribute_map = { - 'event_type': {'key': 'eventType', 'type': 'str'}, - 'webhook_type': {'key': 'webhookType', 'type': 'str'}, - } - - _subtype_map = { - 'webhook_type': {'AzureDevOps': 'AzureDevOpsWebhook'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword event_type: Send callback on a specified notification event. - :paramtype event_type: str - """ - super(Webhook, self).__init__(**kwargs) - self.event_type = kwargs.get('event_type', None) - self.webhook_type = None # type: Optional[str] - - -class AzureDevOpsWebhook(Webhook): - """Webhook details specific for Azure DevOps. - - All required parameters must be populated in order to send to Azure. - - :ivar event_type: Send callback on a specified notification event. - :vartype event_type: str - :ivar webhook_type: Required. [Required] Specifies the type of service to send a - callback.Constant filled by server. Possible values include: "AzureDevOps". 
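Webhook and AzureDevOpsWebhook above show the consumer-facing effect of a "constant filled by server" discriminator: callers never pass webhook_type, yet it always appears in the serialized payload because the subclass pins it. A sketch under the usual import-path assumption; the event name is illustrative, not a documented value.

from azure.ai.ml._restclient.v2023_08_01_preview.models import AzureDevOpsWebhook

hook = AzureDevOpsWebhook(event_type="RunCompleted")  # event name is a placeholder
print(hook.serialize())  # {'eventType': 'RunCompleted', 'webhookType': 'AzureDevOps'}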
- :vartype webhook_type: str or ~azure.mgmt.machinelearningservices.models.WebhookType - """ - - _validation = { - 'webhook_type': {'required': True}, - } - - _attribute_map = { - 'event_type': {'key': 'eventType', 'type': 'str'}, - 'webhook_type': {'key': 'webhookType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword event_type: Send callback on a specified notification event. - :paramtype event_type: str - """ - super(AzureDevOpsWebhook, self).__init__(**kwargs) - self.webhook_type = 'AzureDevOps' # type: str - - -class AzureFileDatastore(DatastoreProperties, AzureDatastore): - """Azure File datastore configuration. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar resource_group: Azure Resource Group name. - :vartype resource_group: str - :ivar subscription_id: Azure Subscription Id. - :vartype subscription_id: str - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar credentials: Required. [Required] Account credentials. - :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled - by server. Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", - "AzureFile", "Hdfs", "OneLake". - :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType - :ivar intellectual_property: Intellectual Property details. - :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar is_default: Readonly property to indicate if datastore is the workspace default - datastore. - :vartype is_default: bool - :ivar account_name: Required. [Required] Storage account name. - :vartype account_name: str - :ivar endpoint: Azure cloud endpoint for the storage account. - :vartype endpoint: str - :ivar file_share_name: Required. [Required] The name of the Azure file share that the datastore - points to. - :vartype file_share_name: str - :ivar protocol: Protocol used to communicate with the storage account. - :vartype protocol: str - :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". 
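The Webhook base removed above is a discriminated type: its generated `__init__` leaves `webhook_type` unset and `_subtype_map` routes on `webhookType`, so the `AzureDevOpsWebhook` subclass constant-fills the value. A minimal sketch of what that looks like at construction time, assuming the class is still exported from `azure.ai.ml._restclient.v2023_08_01_preview.models` after regeneration (the import path and the `"RunCompleted"` event name are illustrative, not taken from this patch):

```python
# Sketch only: import path and event name are assumptions, not confirmed by this patch.
from azure.ai.ml._restclient.v2023_08_01_preview.models import AzureDevOpsWebhook

hook = AzureDevOpsWebhook(event_type="RunCompleted")  # event_type is optional
print(hook.webhook_type)   # "AzureDevOps" -- constant-filled by the generated __init__
print(hook.serialize())    # wire shape uses the camelCase keys from _attribute_map,
                           # e.g. {"webhookType": "AzureDevOps", "eventType": "RunCompleted"}
```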
- :vartype service_data_access_auth_identity: str or - ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity - """ - - _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'account_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'file_share_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'file_share_name': {'key': 'fileShareName', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword resource_group: Azure Resource Group name. - :paramtype resource_group: str - :keyword subscription_id: Azure Subscription Id. - :paramtype subscription_id: str - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword credentials: Required. [Required] Account credentials. - :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :keyword intellectual_property: Intellectual Property details. - :paramtype intellectual_property: - ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword account_name: Required. [Required] Storage account name. - :paramtype account_name: str - :keyword endpoint: Azure cloud endpoint for the storage account. - :paramtype endpoint: str - :keyword file_share_name: Required. [Required] The name of the Azure file share that the - datastore points to. - :paramtype file_share_name: str - :keyword protocol: Protocol used to communicate with the storage account. - :paramtype protocol: str - :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". 
- :paramtype service_data_access_auth_identity: str or - ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity - """ - super(AzureFileDatastore, self).__init__(**kwargs) - self.resource_group = kwargs.get('resource_group', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.datastore_type = 'AzureFile' # type: str - self.account_name = kwargs['account_name'] - self.endpoint = kwargs.get('endpoint', None) - self.file_share_name = kwargs['file_share_name'] - self.protocol = kwargs.get('protocol', None) - self.service_data_access_auth_identity = kwargs.get('service_data_access_auth_identity', None) - self.description = kwargs.get('description', None) - self.properties = kwargs.get('properties', None) - self.tags = kwargs.get('tags', None) - self.credentials = kwargs['credentials'] - self.intellectual_property = kwargs.get('intellectual_property', None) - self.is_default = None - - -class InferencingServer(msrest.serialization.Model): - """InferencingServer. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureMLBatchInferencingServer, AzureMLOnlineInferencingServer, CustomInferencingServer, TritonInferencingServer. - - All required parameters must be populated in order to send to Azure. - - :ivar server_type: Required. [Required] Inferencing server type for various targets.Constant - filled by server. Possible values include: "AzureMLOnline", "AzureMLBatch", "Triton", "Custom". - :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType - """ - - _validation = { - 'server_type': {'required': True}, - } - - _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, - } - - _subtype_map = { - 'server_type': {'AzureMLBatch': 'AzureMLBatchInferencingServer', 'AzureMLOnline': 'AzureMLOnlineInferencingServer', 'Custom': 'CustomInferencingServer', 'Triton': 'TritonInferencingServer'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(InferencingServer, self).__init__(**kwargs) - self.server_type = None # type: Optional[str] - - -class AzureMLBatchInferencingServer(InferencingServer): - """Azure ML batch inferencing server configurations. - - All required parameters must be populated in order to send to Azure. - - :ivar server_type: Required. [Required] Inferencing server type for various targets.Constant - filled by server. Possible values include: "AzureMLOnline", "AzureMLBatch", "Triton", "Custom". - :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType - :ivar code_configuration: Code configuration for AML batch inferencing server. - :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - """ - - _validation = { - 'server_type': {'required': True}, - } - - _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword code_configuration: Code configuration for AML batch inferencing server. - :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - """ - super(AzureMLBatchInferencingServer, self).__init__(**kwargs) - self.server_type = 'AzureMLBatch' # type: str - self.code_configuration = kwargs.get('code_configuration', None) - - -class AzureMLOnlineInferencingServer(InferencingServer): - """Azure ML online inferencing configurations. 
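The `InferencingServer` hierarchy removed above follows the same discriminator pattern: `_subtype_map` routes on `serverType` and each subclass pins `server_type`. A hedged sketch, assuming `AzureMLBatchInferencingServer` is still exported from the preview models package:

```python
# Sketch only: the import path is an assumption.
from azure.ai.ml._restclient.v2023_08_01_preview.models import AzureMLBatchInferencingServer

server = AzureMLBatchInferencingServer()   # code_configuration is optional
print(server.server_type)                  # "AzureMLBatch", constant-filled by the subclass __init__
print(server.as_dict())                    # as_dict() keys default to the Python attribute names
```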
- - All required parameters must be populated in order to send to Azure. - - :ivar server_type: Required. [Required] Inferencing server type for various targets.Constant - filled by server. Possible values include: "AzureMLOnline", "AzureMLBatch", "Triton", "Custom". - :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType - :ivar code_configuration: Code configuration for AML inferencing server. - :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - """ - - _validation = { - 'server_type': {'required': True}, - } - - _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword code_configuration: Code configuration for AML inferencing server. - :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - """ - super(AzureMLOnlineInferencingServer, self).__init__(**kwargs) - self.server_type = 'AzureMLOnline' # type: str - self.code_configuration = kwargs.get('code_configuration', None) - - -class EarlyTerminationPolicy(msrest.serialization.Model): - """Early termination policies enable canceling poor-performing runs before they complete. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BanditPolicy, MedianStoppingPolicy, TruncationSelectionPolicy. - - All required parameters must be populated in order to send to Azure. - - :ivar delay_evaluation: Number of intervals by which to delay the first evaluation. - :vartype delay_evaluation: int - :ivar evaluation_interval: Interval (number of runs) between policy evaluations. - :vartype evaluation_interval: int - :ivar policy_type: Required. [Required] Name of policy configuration.Constant filled by server. - Possible values include: "Bandit", "MedianStopping", "TruncationSelection". - :vartype policy_type: str or - ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType - """ - - _validation = { - 'policy_type': {'required': True}, - } - - _attribute_map = { - 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, - 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, - 'policy_type': {'key': 'policyType', 'type': 'str'}, - } - - _subtype_map = { - 'policy_type': {'Bandit': 'BanditPolicy', 'MedianStopping': 'MedianStoppingPolicy', 'TruncationSelection': 'TruncationSelectionPolicy'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. - :paramtype delay_evaluation: int - :keyword evaluation_interval: Interval (number of runs) between policy evaluations. - :paramtype evaluation_interval: int - """ - super(EarlyTerminationPolicy, self).__init__(**kwargs) - self.delay_evaluation = kwargs.get('delay_evaluation', 0) - self.evaluation_interval = kwargs.get('evaluation_interval', 0) - self.policy_type = None # type: Optional[str] - - -class BanditPolicy(EarlyTerminationPolicy): - """Defines an early termination policy based on slack criteria, and a frequency and delay interval for evaluation. - - All required parameters must be populated in order to send to Azure. - - :ivar delay_evaluation: Number of intervals by which to delay the first evaluation. - :vartype delay_evaluation: int - :ivar evaluation_interval: Interval (number of runs) between policy evaluations. 
- :vartype evaluation_interval: int - :ivar policy_type: Required. [Required] Name of policy configuration.Constant filled by server. - Possible values include: "Bandit", "MedianStopping", "TruncationSelection". - :vartype policy_type: str or - ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType - :ivar slack_amount: Absolute distance allowed from the best performing run. - :vartype slack_amount: float - :ivar slack_factor: Ratio of the allowed distance from the best performing run. - :vartype slack_factor: float - """ - - _validation = { - 'policy_type': {'required': True}, - } - - _attribute_map = { - 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, - 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, - 'policy_type': {'key': 'policyType', 'type': 'str'}, - 'slack_amount': {'key': 'slackAmount', 'type': 'float'}, - 'slack_factor': {'key': 'slackFactor', 'type': 'float'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. - :paramtype delay_evaluation: int - :keyword evaluation_interval: Interval (number of runs) between policy evaluations. - :paramtype evaluation_interval: int - :keyword slack_amount: Absolute distance allowed from the best performing run. - :paramtype slack_amount: float - :keyword slack_factor: Ratio of the allowed distance from the best performing run. - :paramtype slack_factor: float - """ - super(BanditPolicy, self).__init__(**kwargs) - self.policy_type = 'Bandit' # type: str - self.slack_amount = kwargs.get('slack_amount', 0) - self.slack_factor = kwargs.get('slack_factor', 0) - - -class BaseEnvironmentSource(msrest.serialization.Model): - """BaseEnvironmentSource. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BaseEnvironmentId. - - All required parameters must be populated in order to send to Azure. - - :ivar base_environment_source_type: Required. [Required] Base environment type.Constant filled - by server. Possible values include: "EnvironmentAsset". - :vartype base_environment_source_type: str or - ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSourceType - """ - - _validation = { - 'base_environment_source_type': {'required': True}, - } - - _attribute_map = { - 'base_environment_source_type': {'key': 'baseEnvironmentSourceType', 'type': 'str'}, - } - - _subtype_map = { - 'base_environment_source_type': {'EnvironmentAsset': 'BaseEnvironmentId'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(BaseEnvironmentSource, self).__init__(**kwargs) - self.base_environment_source_type = None # type: Optional[str] - - -class BaseEnvironmentId(BaseEnvironmentSource): - """Base environment type. - - All required parameters must be populated in order to send to Azure. - - :ivar base_environment_source_type: Required. [Required] Base environment type.Constant filled - by server. Possible values include: "EnvironmentAsset". - :vartype base_environment_source_type: str or - ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSourceType - :ivar resource_id: Required. [Required] Resource id accepting ArmId or AzureMlId. 
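`BanditPolicy` removed above is the usual early-termination case: `policy_type` is constant-filled, and `slack_amount`/`slack_factor` default to 0. A small sketch with illustrative values, assuming the class is still exported under the same name from the preview models package:

```python
from azure.ai.ml._restclient.v2023_08_01_preview.models import BanditPolicy  # assumed import path

# Cancel runs that fall more than 20% behind the best run, evaluating every interval
# after an initial delay of 5 intervals (values are illustrative).
policy = BanditPolicy(slack_factor=0.2, evaluation_interval=1, delay_evaluation=5)
print(policy.policy_type)   # "Bandit" -- constant-filled by the generated __init__
print(policy.serialize())   # camelCase wire keys: policyType, slackFactor, evaluationInterval, ...
```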
- :vartype resource_id: str - """ - - _validation = { - 'base_environment_source_type': {'required': True}, - 'resource_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'base_environment_source_type': {'key': 'baseEnvironmentSourceType', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword resource_id: Required. [Required] Resource id accepting ArmId or AzureMlId. - :paramtype resource_id: str - """ - super(BaseEnvironmentId, self).__init__(**kwargs) - self.base_environment_source_type = 'EnvironmentAsset' # type: str - self.resource_id = kwargs['resource_id'] - - -class Resource(msrest.serialization.Model): - """Common fields that are returned in the response for all Azure Resource Manager resources. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(Resource, self).__init__(**kwargs) - self.id = None - self.name = None - self.type = None - self.system_data = None - - -class TrackedResource(Resource): - """The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: A set of tags. Resource tags. - :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. 
- :vartype location: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword tags: A set of tags. Resource tags. - :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. - :paramtype location: str - """ - super(TrackedResource, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) - self.location = kwargs['location'] - - -class BatchDeployment(TrackedResource): - """BatchDeployment. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: A set of tags. Resource tags. - :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. - :vartype location: str - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :vartype kind: str - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.BatchDeploymentProperties - :ivar sku: Sku details required for ARM contract for Autoscaling. - :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'BatchDeploymentProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword tags: A set of tags. Resource tags. - :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. 
- :paramtype location: str - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :paramtype kind: str - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.BatchDeploymentProperties - :keyword sku: Sku details required for ARM contract for Autoscaling. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - """ - super(BatchDeployment, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.properties = kwargs['properties'] - self.sku = kwargs.get('sku', None) - - -class BatchDeploymentConfiguration(msrest.serialization.Model): - """Properties relevant to different deployment types. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BatchPipelineComponentDeploymentConfiguration. - - All required parameters must be populated in order to send to Azure. - - :ivar deployment_configuration_type: Required. [Required] The type of the deployment.Constant - filled by server. Possible values include: "Model", "PipelineComponent". - :vartype deployment_configuration_type: str or - ~azure.mgmt.machinelearningservices.models.BatchDeploymentConfigurationType - """ - - _validation = { - 'deployment_configuration_type': {'required': True}, - } - - _attribute_map = { - 'deployment_configuration_type': {'key': 'deploymentConfigurationType', 'type': 'str'}, - } - - _subtype_map = { - 'deployment_configuration_type': {'PipelineComponent': 'BatchPipelineComponentDeploymentConfiguration'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(BatchDeploymentConfiguration, self).__init__(**kwargs) - self.deployment_configuration_type = None # type: Optional[str] - - -class EndpointDeploymentPropertiesBase(msrest.serialization.Model): - """Base definition for endpoint deployment. - - :ivar code_configuration: Code configuration for the endpoint deployment. - :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :ivar description: Description of the endpoint deployment. - :vartype description: str - :ivar environment_id: ARM resource ID of the environment specification for the endpoint - deployment. - :vartype environment_id: str - :ivar environment_variables: Environment variables configuration for the deployment. - :vartype environment_variables: dict[str, str] - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - """ - - _attribute_map = { - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword code_configuration: Code configuration for the endpoint deployment. - :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :keyword description: Description of the endpoint deployment. 
- :paramtype description: str - :keyword environment_id: ARM resource ID of the environment specification for the endpoint - deployment. - :paramtype environment_id: str - :keyword environment_variables: Environment variables configuration for the deployment. - :paramtype environment_variables: dict[str, str] - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - """ - super(EndpointDeploymentPropertiesBase, self).__init__(**kwargs) - self.code_configuration = kwargs.get('code_configuration', None) - self.description = kwargs.get('description', None) - self.environment_id = kwargs.get('environment_id', None) - self.environment_variables = kwargs.get('environment_variables', None) - self.properties = kwargs.get('properties', None) - - -class BatchDeploymentProperties(EndpointDeploymentPropertiesBase): - """Batch inference settings per deployment. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar code_configuration: Code configuration for the endpoint deployment. - :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :ivar description: Description of the endpoint deployment. - :vartype description: str - :ivar environment_id: ARM resource ID of the environment specification for the endpoint - deployment. - :vartype environment_id: str - :ivar environment_variables: Environment variables configuration for the deployment. - :vartype environment_variables: dict[str, str] - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar compute: Compute target for batch inference operation. - :vartype compute: str - :ivar deployment_configuration: Properties relevant to different deployment types. - :vartype deployment_configuration: - ~azure.mgmt.machinelearningservices.models.BatchDeploymentConfiguration - :ivar error_threshold: Error threshold, if the error count for the entire input goes above this - value, - the batch inference will be aborted. Range is [-1, int.MaxValue]. - For FileDataset, this value is the count of file failures. - For TabularDataset, this value is the count of record failures. - If set to -1 (the lower bound), all failures during batch inference will be ignored. - :vartype error_threshold: int - :ivar logging_level: Logging level for batch inference operation. Possible values include: - "Info", "Warning", "Debug". - :vartype logging_level: str or ~azure.mgmt.machinelearningservices.models.BatchLoggingLevel - :ivar max_concurrency_per_instance: Indicates maximum number of parallelism per instance. - :vartype max_concurrency_per_instance: int - :ivar mini_batch_size: Size of the mini-batch passed to each batch invocation. - For FileDataset, this is the number of files per mini-batch. - For TabularDataset, this is the size of the records in bytes, per mini-batch. - :vartype mini_batch_size: long - :ivar model: Reference to the model asset for the endpoint deployment. - :vartype model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase - :ivar output_action: Indicates how the output will be organized. Possible values include: - "SummaryOnly", "AppendRow". - :vartype output_action: str or ~azure.mgmt.machinelearningservices.models.BatchOutputAction - :ivar output_file_name: Customized output file name for append_row output action. 
- :vartype output_file_name: str - :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values - include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState - :ivar resources: Indicates compute configuration for the job. - If not provided, will default to the defaults defined in ResourceConfiguration. - :vartype resources: ~azure.mgmt.machinelearningservices.models.DeploymentResourceConfiguration - :ivar retry_settings: Retry Settings for the batch inference operation. - If not provided, will default to the defaults defined in BatchRetrySettings. - :vartype retry_settings: ~azure.mgmt.machinelearningservices.models.BatchRetrySettings - """ - - _validation = { - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'compute': {'key': 'compute', 'type': 'str'}, - 'deployment_configuration': {'key': 'deploymentConfiguration', 'type': 'BatchDeploymentConfiguration'}, - 'error_threshold': {'key': 'errorThreshold', 'type': 'int'}, - 'logging_level': {'key': 'loggingLevel', 'type': 'str'}, - 'max_concurrency_per_instance': {'key': 'maxConcurrencyPerInstance', 'type': 'int'}, - 'mini_batch_size': {'key': 'miniBatchSize', 'type': 'long'}, - 'model': {'key': 'model', 'type': 'AssetReferenceBase'}, - 'output_action': {'key': 'outputAction', 'type': 'str'}, - 'output_file_name': {'key': 'outputFileName', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'resources': {'key': 'resources', 'type': 'DeploymentResourceConfiguration'}, - 'retry_settings': {'key': 'retrySettings', 'type': 'BatchRetrySettings'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword code_configuration: Code configuration for the endpoint deployment. - :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :keyword description: Description of the endpoint deployment. - :paramtype description: str - :keyword environment_id: ARM resource ID of the environment specification for the endpoint - deployment. - :paramtype environment_id: str - :keyword environment_variables: Environment variables configuration for the deployment. - :paramtype environment_variables: dict[str, str] - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword compute: Compute target for batch inference operation. - :paramtype compute: str - :keyword deployment_configuration: Properties relevant to different deployment types. - :paramtype deployment_configuration: - ~azure.mgmt.machinelearningservices.models.BatchDeploymentConfiguration - :keyword error_threshold: Error threshold, if the error count for the entire input goes above - this value, - the batch inference will be aborted. Range is [-1, int.MaxValue]. - For FileDataset, this value is the count of file failures. - For TabularDataset, this value is the count of record failures. - If set to -1 (the lower bound), all failures during batch inference will be ignored. 
- :paramtype error_threshold: int - :keyword logging_level: Logging level for batch inference operation. Possible values include: - "Info", "Warning", "Debug". - :paramtype logging_level: str or ~azure.mgmt.machinelearningservices.models.BatchLoggingLevel - :keyword max_concurrency_per_instance: Indicates maximum number of parallelism per instance. - :paramtype max_concurrency_per_instance: int - :keyword mini_batch_size: Size of the mini-batch passed to each batch invocation. - For FileDataset, this is the number of files per mini-batch. - For TabularDataset, this is the size of the records in bytes, per mini-batch. - :paramtype mini_batch_size: long - :keyword model: Reference to the model asset for the endpoint deployment. - :paramtype model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase - :keyword output_action: Indicates how the output will be organized. Possible values include: - "SummaryOnly", "AppendRow". - :paramtype output_action: str or ~azure.mgmt.machinelearningservices.models.BatchOutputAction - :keyword output_file_name: Customized output file name for append_row output action. - :paramtype output_file_name: str - :keyword resources: Indicates compute configuration for the job. - If not provided, will default to the defaults defined in ResourceConfiguration. - :paramtype resources: - ~azure.mgmt.machinelearningservices.models.DeploymentResourceConfiguration - :keyword retry_settings: Retry Settings for the batch inference operation. - If not provided, will default to the defaults defined in BatchRetrySettings. - :paramtype retry_settings: ~azure.mgmt.machinelearningservices.models.BatchRetrySettings - """ - super(BatchDeploymentProperties, self).__init__(**kwargs) - self.compute = kwargs.get('compute', None) - self.deployment_configuration = kwargs.get('deployment_configuration', None) - self.error_threshold = kwargs.get('error_threshold', -1) - self.logging_level = kwargs.get('logging_level', None) - self.max_concurrency_per_instance = kwargs.get('max_concurrency_per_instance', 1) - self.mini_batch_size = kwargs.get('mini_batch_size', 10) - self.model = kwargs.get('model', None) - self.output_action = kwargs.get('output_action', None) - self.output_file_name = kwargs.get('output_file_name', "predictions.csv") - self.provisioning_state = None - self.resources = kwargs.get('resources', None) - self.retry_settings = kwargs.get('retry_settings', None) - - -class BatchDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of BatchDeployment entities. - - :ivar next_link: The link to the next page of BatchDeployment objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type BatchDeployment. - :vartype value: list[~azure.mgmt.machinelearningservices.models.BatchDeployment] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[BatchDeployment]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of BatchDeployment objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type BatchDeployment. 
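`BatchDeploymentProperties` removed above bakes the service defaults into the generated constructor (`error_threshold=-1`, `max_concurrency_per_instance=1`, `mini_batch_size=10`, `output_file_name="predictions.csv"`), while `provisioning_state` is read-only. A minimal sketch, assuming the class name and import path survive the regeneration; the compute name is a placeholder:

```python
from azure.ai.ml._restclient.v2023_08_01_preview.models import BatchDeploymentProperties

props = BatchDeploymentProperties(compute="cpu-cluster")   # placeholder compute target
print(props.error_threshold, props.mini_batch_size)        # -1 10  (generated defaults)
print(props.output_file_name)                              # "predictions.csv"
print(props.provisioning_state)                            # None -- read-only, populated by the server
```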
- :paramtype value: list[~azure.mgmt.machinelearningservices.models.BatchDeployment] - """ - super(BatchDeploymentTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class BatchEndpoint(TrackedResource): - """BatchEndpoint. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: A set of tags. Resource tags. - :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. - :vartype location: str - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :vartype kind: str - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.BatchEndpointProperties - :ivar sku: Sku details required for ARM contract for Autoscaling. - :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'BatchEndpointProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword tags: A set of tags. Resource tags. - :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. - :paramtype location: str - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :paramtype kind: str - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.BatchEndpointProperties - :keyword sku: Sku details required for ARM contract for Autoscaling. 
- :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - """ - super(BatchEndpoint, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.properties = kwargs['properties'] - self.sku = kwargs.get('sku', None) - - -class BatchEndpointDefaults(msrest.serialization.Model): - """Batch endpoint default values. - - :ivar deployment_name: Name of the deployment that will be default for the endpoint. - This deployment will end up getting 100% traffic when the endpoint scoring URL is invoked. - :vartype deployment_name: str - """ - - _attribute_map = { - 'deployment_name': {'key': 'deploymentName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword deployment_name: Name of the deployment that will be default for the endpoint. - This deployment will end up getting 100% traffic when the endpoint scoring URL is invoked. - :paramtype deployment_name: str - """ - super(BatchEndpointDefaults, self).__init__(**kwargs) - self.deployment_name = kwargs.get('deployment_name', None) - - -class EndpointPropertiesBase(msrest.serialization.Model): - """Inference Endpoint base definition. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' for - Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. - Possible values include: "AMLToken", "Key", "AADToken". - :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode - :ivar description: Description of the inference endpoint. - :vartype description: str - :ivar keys: EndpointAuthKeys to set initially on an Endpoint. - This property will always be returned as null. AuthKey values must be retrieved using the - ListKeys API. - :vartype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar scoring_uri: Endpoint URI. - :vartype scoring_uri: str - :ivar swagger_uri: Endpoint Swagger URI. - :vartype swagger_uri: str - """ - - _validation = { - 'auth_mode': {'required': True}, - 'scoring_uri': {'readonly': True}, - 'swagger_uri': {'readonly': True}, - } - - _attribute_map = { - 'auth_mode': {'key': 'authMode', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, - 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' - for Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' - does. Possible values include: "AMLToken", "Key", "AADToken". - :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode - :keyword description: Description of the inference endpoint. - :paramtype description: str - :keyword keys: EndpointAuthKeys to set initially on an Endpoint. - This property will always be returned as null. AuthKey values must be retrieved using the - ListKeys API. 
- :paramtype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - """ - super(EndpointPropertiesBase, self).__init__(**kwargs) - self.auth_mode = kwargs['auth_mode'] - self.description = kwargs.get('description', None) - self.keys = kwargs.get('keys', None) - self.properties = kwargs.get('properties', None) - self.scoring_uri = None - self.swagger_uri = None - - -class BatchEndpointProperties(EndpointPropertiesBase): - """Batch endpoint configuration. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' for - Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. - Possible values include: "AMLToken", "Key", "AADToken". - :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode - :ivar description: Description of the inference endpoint. - :vartype description: str - :ivar keys: EndpointAuthKeys to set initially on an Endpoint. - This property will always be returned as null. AuthKey values must be retrieved using the - ListKeys API. - :vartype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar scoring_uri: Endpoint URI. - :vartype scoring_uri: str - :ivar swagger_uri: Endpoint Swagger URI. - :vartype swagger_uri: str - :ivar defaults: Default values for Batch Endpoint. - :vartype defaults: ~azure.mgmt.machinelearningservices.models.BatchEndpointDefaults - :ivar provisioning_state: Provisioning state for the endpoint. Possible values include: - "Creating", "Deleting", "Succeeded", "Failed", "Updating", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState - """ - - _validation = { - 'auth_mode': {'required': True}, - 'scoring_uri': {'readonly': True}, - 'swagger_uri': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'auth_mode': {'key': 'authMode', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, - 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, - 'defaults': {'key': 'defaults', 'type': 'BatchEndpointDefaults'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' - for Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' - does. Possible values include: "AMLToken", "Key", "AADToken". - :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode - :keyword description: Description of the inference endpoint. - :paramtype description: str - :keyword keys: EndpointAuthKeys to set initially on an Endpoint. - This property will always be returned as null. AuthKey values must be retrieved using the - ListKeys API. 
- :paramtype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword defaults: Default values for Batch Endpoint. - :paramtype defaults: ~azure.mgmt.machinelearningservices.models.BatchEndpointDefaults - """ - super(BatchEndpointProperties, self).__init__(**kwargs) - self.defaults = kwargs.get('defaults', None) - self.provisioning_state = None - - -class BatchEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of BatchEndpoint entities. - - :ivar next_link: The link to the next page of BatchEndpoint objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type BatchEndpoint. - :vartype value: list[~azure.mgmt.machinelearningservices.models.BatchEndpoint] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[BatchEndpoint]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of BatchEndpoint objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type BatchEndpoint. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.BatchEndpoint] - """ - super(BatchEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class BatchPipelineComponentDeploymentConfiguration(BatchDeploymentConfiguration): - """Properties for a Batch Pipeline Component Deployment. - - All required parameters must be populated in order to send to Azure. - - :ivar deployment_configuration_type: Required. [Required] The type of the deployment.Constant - filled by server. Possible values include: "Model", "PipelineComponent". - :vartype deployment_configuration_type: str or - ~azure.mgmt.machinelearningservices.models.BatchDeploymentConfigurationType - :ivar component_id: The ARM id of the component to be run. - :vartype component_id: ~azure.mgmt.machinelearningservices.models.IdAssetReference - :ivar description: The description which will be applied to the job. - :vartype description: str - :ivar settings: Run-time settings for the pipeline job. - :vartype settings: dict[str, str] - :ivar tags: A set of tags. The tags which will be applied to the job. - :vartype tags: dict[str, str] - """ - - _validation = { - 'deployment_configuration_type': {'required': True}, - } - - _attribute_map = { - 'deployment_configuration_type': {'key': 'deploymentConfigurationType', 'type': 'str'}, - 'component_id': {'key': 'componentId', 'type': 'IdAssetReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'settings': {'key': 'settings', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword component_id: The ARM id of the component to be run. - :paramtype component_id: ~azure.mgmt.machinelearningservices.models.IdAssetReference - :keyword description: The description which will be applied to the job. - :paramtype description: str - :keyword settings: Run-time settings for the pipeline job. - :paramtype settings: dict[str, str] - :keyword tags: A set of tags. The tags which will be applied to the job. 
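`BatchEndpointProperties` removed above only requires `auth_mode`; `keys`, `scoring_uri`, `swagger_uri`, and `provisioning_state` are populated by the service on responses. A hedged sketch, assuming both classes are still exported and using a placeholder deployment name:

```python
from azure.ai.ml._restclient.v2023_08_01_preview.models import (
    BatchEndpointDefaults,
    BatchEndpointProperties,
)

endpoint_props = BatchEndpointProperties(
    auth_mode="AADToken",                                    # one of the listed possible values
    defaults=BatchEndpointDefaults(deployment_name="blue"),  # "blue" is a placeholder
)
print(endpoint_props.scoring_uri, endpoint_props.provisioning_state)  # None None until the service returns them
```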
- :paramtype tags: dict[str, str] - """ - super(BatchPipelineComponentDeploymentConfiguration, self).__init__(**kwargs) - self.deployment_configuration_type = 'PipelineComponent' # type: str - self.component_id = kwargs.get('component_id', None) - self.description = kwargs.get('description', None) - self.settings = kwargs.get('settings', None) - self.tags = kwargs.get('tags', None) - - -class BatchRetrySettings(msrest.serialization.Model): - """Retry settings for a batch inference operation. - - :ivar max_retries: Maximum retry count for a mini-batch. - :vartype max_retries: int - :ivar timeout: Invocation timeout for a mini-batch, in ISO 8601 format. - :vartype timeout: ~datetime.timedelta - """ - - _attribute_map = { - 'max_retries': {'key': 'maxRetries', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword max_retries: Maximum retry count for a mini-batch. - :paramtype max_retries: int - :keyword timeout: Invocation timeout for a mini-batch, in ISO 8601 format. - :paramtype timeout: ~datetime.timedelta - """ - super(BatchRetrySettings, self).__init__(**kwargs) - self.max_retries = kwargs.get('max_retries', 3) - self.timeout = kwargs.get('timeout', "PT30S") - - -class SamplingAlgorithm(msrest.serialization.Model): - """The Sampling Algorithm used to generate hyperparameter values, along with properties to -configure the algorithm. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BayesianSamplingAlgorithm, GridSamplingAlgorithm, RandomSamplingAlgorithm. - - All required parameters must be populated in order to send to Azure. - - :ivar sampling_algorithm_type: Required. [Required] The algorithm used for generating - hyperparameter values, along with configuration properties.Constant filled by server. Possible - values include: "Grid", "Random", "Bayesian". - :vartype sampling_algorithm_type: str or - ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType - """ - - _validation = { - 'sampling_algorithm_type': {'required': True}, - } - - _attribute_map = { - 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, - } - - _subtype_map = { - 'sampling_algorithm_type': {'Bayesian': 'BayesianSamplingAlgorithm', 'Grid': 'GridSamplingAlgorithm', 'Random': 'RandomSamplingAlgorithm'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(SamplingAlgorithm, self).__init__(**kwargs) - self.sampling_algorithm_type = None # type: Optional[str] - - -class BayesianSamplingAlgorithm(SamplingAlgorithm): - """Defines a Sampling Algorithm that generates values based on previous values. - - All required parameters must be populated in order to send to Azure. - - :ivar sampling_algorithm_type: Required. [Required] The algorithm used for generating - hyperparameter values, along with configuration properties.Constant filled by server. Possible - values include: "Grid", "Random", "Bayesian". - :vartype sampling_algorithm_type: str or - ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType - """ - - _validation = { - 'sampling_algorithm_type': {'required': True}, - } - - _attribute_map = { - 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(BayesianSamplingAlgorithm, self).__init__(**kwargs) - self.sampling_algorithm_type = 'Bayesian' # type: str - - -class BindOptions(msrest.serialization.Model): - """BindOptions. 
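`BatchRetrySettings` removed above defaults to 3 retries and a `"PT30S"` timeout, with the timeout typed as an ISO 8601 duration. A small sketch under the same import-path assumption:

```python
from azure.ai.ml._restclient.v2023_08_01_preview.models import BatchRetrySettings

retry = BatchRetrySettings()                                # generated defaults
print(retry.max_retries, retry.timeout)                     # 3 PT30S
retry = BatchRetrySettings(max_retries=5, timeout="PT1M")   # override with another ISO 8601 duration
```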
- - :ivar propagation: Type of Bind Option. - :vartype propagation: str - :ivar create_host_path: Indicate whether to create host path. - :vartype create_host_path: bool - :ivar selinux: Mention the selinux options. - :vartype selinux: str - """ - - _attribute_map = { - 'propagation': {'key': 'propagation', 'type': 'str'}, - 'create_host_path': {'key': 'createHostPath', 'type': 'bool'}, - 'selinux': {'key': 'selinux', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword propagation: Type of Bind Option. - :paramtype propagation: str - :keyword create_host_path: Indicate whether to create host path. - :paramtype create_host_path: bool - :keyword selinux: Mention the selinux options. - :paramtype selinux: str - """ - super(BindOptions, self).__init__(**kwargs) - self.propagation = kwargs.get('propagation', None) - self.create_host_path = kwargs.get('create_host_path', None) - self.selinux = kwargs.get('selinux', None) - - -class BlobReferenceForConsumptionDto(msrest.serialization.Model): - """BlobReferenceForConsumptionDto. - - :ivar blob_uri: Blob URI path for client to upload data. - Example: https://blob.windows.core.net/Container/Path. - :vartype blob_uri: str - :ivar credential: Credential info to access storage account. - :vartype credential: ~azure.mgmt.machinelearningservices.models.PendingUploadCredentialDto - :ivar storage_account_arm_id: Arm ID of the storage account to use. - :vartype storage_account_arm_id: str - """ - - _attribute_map = { - 'blob_uri': {'key': 'blobUri', 'type': 'str'}, - 'credential': {'key': 'credential', 'type': 'PendingUploadCredentialDto'}, - 'storage_account_arm_id': {'key': 'storageAccountArmId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword blob_uri: Blob URI path for client to upload data. - Example: https://blob.windows.core.net/Container/Path. - :paramtype blob_uri: str - :keyword credential: Credential info to access storage account. - :paramtype credential: ~azure.mgmt.machinelearningservices.models.PendingUploadCredentialDto - :keyword storage_account_arm_id: Arm ID of the storage account to use. - :paramtype storage_account_arm_id: str - """ - super(BlobReferenceForConsumptionDto, self).__init__(**kwargs) - self.blob_uri = kwargs.get('blob_uri', None) - self.credential = kwargs.get('credential', None) - self.storage_account_arm_id = kwargs.get('storage_account_arm_id', None) - - -class BuildContext(msrest.serialization.Model): - """Configuration settings for Docker build context. - - All required parameters must be populated in order to send to Azure. - - :ivar context_uri: Required. [Required] URI of the Docker build context used to build the - image. Supports blob URIs on environment creation and may return blob or Git URIs. - - - .. raw:: html - - . - :vartype context_uri: str - :ivar dockerfile_path: Path to the Dockerfile in the build context. - - - .. raw:: html - - . - :vartype dockerfile_path: str - """ - - _validation = { - 'context_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'context_uri': {'key': 'contextUri', 'type': 'str'}, - 'dockerfile_path': {'key': 'dockerfilePath', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword context_uri: Required. [Required] URI of the Docker build context used to build the - image. Supports blob URIs on environment creation and may return blob or Git URIs. - - - .. raw:: html - - . 
- :paramtype context_uri: str - :keyword dockerfile_path: Path to the Dockerfile in the build context. - - - .. raw:: html - - . - :paramtype dockerfile_path: str - """ - super(BuildContext, self).__init__(**kwargs) - self.context_uri = kwargs['context_uri'] - self.dockerfile_path = kwargs.get('dockerfile_path', "Dockerfile") - - -class DataDriftMetricThresholdBase(msrest.serialization.Model): - """DataDriftMetricThresholdBase. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CategoricalDataDriftMetricThreshold, NumericalDataDriftMetricThreshold. - - All required parameters must be populated in order to send to Azure. - - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType - :ivar threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - - _validation = { - 'data_type': {'required': True}, - } - - _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - } - - _subtype_map = { - 'data_type': {'Categorical': 'CategoricalDataDriftMetricThreshold', 'Numerical': 'NumericalDataDriftMetricThreshold'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - super(DataDriftMetricThresholdBase, self).__init__(**kwargs) - self.data_type = None # type: Optional[str] - self.threshold = kwargs.get('threshold', None) - - -class CategoricalDataDriftMetricThreshold(DataDriftMetricThresholdBase): - """CategoricalDataDriftMetricThreshold. - - All required parameters must be populated in order to send to Azure. - - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType - :ivar threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. [Required] The categorical data drift metric to calculate. Possible - values include: "JensenShannonDistance", "PopulationStabilityIndex", "PearsonsChiSquaredTest". - :vartype metric: str or ~azure.mgmt.machinelearningservices.models.CategoricalDataDriftMetric - """ - - _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, - } - - _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. [Required] The categorical data drift metric to calculate. 
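# Sketch of BuildContext: context_uri is the only required keyword and dockerfile_path
# falls back to "Dockerfile". The blob URL below is a hypothetical placeholder; import
# path as used elsewhere in this package.
from azure.ai.ml._restclient.v2023_08_01_preview import models

build = models.BuildContext(
    context_uri="https://example.blob.core.windows.net/container/docker-context/",
    dockerfile_path="Dockerfile",
)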
Possible - values include: "JensenShannonDistance", "PopulationStabilityIndex", "PearsonsChiSquaredTest". - :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.CategoricalDataDriftMetric - """ - super(CategoricalDataDriftMetricThreshold, self).__init__(**kwargs) - self.data_type = 'Categorical' # type: str - self.metric = kwargs['metric'] - - -class DataQualityMetricThresholdBase(msrest.serialization.Model): - """DataQualityMetricThresholdBase. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CategoricalDataQualityMetricThreshold, NumericalDataQualityMetricThreshold. - - All required parameters must be populated in order to send to Azure. - - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType - :ivar threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - - _validation = { - 'data_type': {'required': True}, - } - - _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - } - - _subtype_map = { - 'data_type': {'Categorical': 'CategoricalDataQualityMetricThreshold', 'Numerical': 'NumericalDataQualityMetricThreshold'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - super(DataQualityMetricThresholdBase, self).__init__(**kwargs) - self.data_type = None # type: Optional[str] - self.threshold = kwargs.get('threshold', None) - - -class CategoricalDataQualityMetricThreshold(DataQualityMetricThresholdBase): - """CategoricalDataQualityMetricThreshold. - - All required parameters must be populated in order to send to Azure. - - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType - :ivar threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. [Required] The categorical data quality metric to calculate. Possible - values include: "NullValueRate", "DataTypeErrorRate", "OutOfBoundsRate". - :vartype metric: str or ~azure.mgmt.machinelearningservices.models.CategoricalDataQualityMetric - """ - - _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, - } - - _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. [Required] The categorical data quality metric to calculate. 
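# Sketch of the polymorphic metric-threshold pattern used throughout these models:
# the dataType discriminator is filled in by the subclass constructor and metric is
# the only required keyword; threshold is optional and defaults server-side.
from azure.ai.ml._restclient.v2023_08_01_preview import models

drift = models.CategoricalDataDriftMetricThreshold(metric="JensenShannonDistance")
assert drift.data_type == "Categorical"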
- Possible values include: "NullValueRate", "DataTypeErrorRate", "OutOfBoundsRate". - :paramtype metric: str or - ~azure.mgmt.machinelearningservices.models.CategoricalDataQualityMetric - """ - super(CategoricalDataQualityMetricThreshold, self).__init__(**kwargs) - self.data_type = 'Categorical' # type: str - self.metric = kwargs['metric'] - - -class PredictionDriftMetricThresholdBase(msrest.serialization.Model): - """PredictionDriftMetricThresholdBase. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CategoricalPredictionDriftMetricThreshold, NumericalPredictionDriftMetricThreshold. - - All required parameters must be populated in order to send to Azure. - - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType - :ivar threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - - _validation = { - 'data_type': {'required': True}, - } - - _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - } - - _subtype_map = { - 'data_type': {'Categorical': 'CategoricalPredictionDriftMetricThreshold', 'Numerical': 'NumericalPredictionDriftMetricThreshold'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - super(PredictionDriftMetricThresholdBase, self).__init__(**kwargs) - self.data_type = None # type: Optional[str] - self.threshold = kwargs.get('threshold', None) - - -class CategoricalPredictionDriftMetricThreshold(PredictionDriftMetricThresholdBase): - """CategoricalPredictionDriftMetricThreshold. - - All required parameters must be populated in order to send to Azure. - - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType - :ivar threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. [Required] The categorical prediction drift metric to calculate. - Possible values include: "JensenShannonDistance", "PopulationStabilityIndex", - "PearsonsChiSquaredTest". - :vartype metric: str or - ~azure.mgmt.machinelearningservices.models.CategoricalPredictionDriftMetric - """ - - _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, - } - - _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. 
[Required] The categorical prediction drift metric to calculate. - Possible values include: "JensenShannonDistance", "PopulationStabilityIndex", - "PearsonsChiSquaredTest". - :paramtype metric: str or - ~azure.mgmt.machinelearningservices.models.CategoricalPredictionDriftMetric - """ - super(CategoricalPredictionDriftMetricThreshold, self).__init__(**kwargs) - self.data_type = 'Categorical' # type: str - self.metric = kwargs['metric'] - - -class CertificateDatastoreCredentials(DatastoreCredentials): - """Certificate datastore credentials configuration. - - All required parameters must be populated in order to send to Azure. - - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". - :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType - :ivar authority_url: Authority URL used for authentication. - :vartype authority_url: str - :ivar client_id: Required. [Required] Service principal client ID. - :vartype client_id: str - :ivar resource_url: Resource the service principal has access to. - :vartype resource_url: str - :ivar secrets: Required. [Required] Service principal secrets. - :vartype secrets: ~azure.mgmt.machinelearningservices.models.CertificateDatastoreSecrets - :ivar tenant_id: Required. [Required] ID of the tenant to which the service principal belongs. - :vartype tenant_id: str - :ivar thumbprint: Required. [Required] Thumbprint of the certificate used for authentication. - :vartype thumbprint: str - """ - - _validation = { - 'credentials_type': {'required': True}, - 'client_id': {'required': True}, - 'secrets': {'required': True}, - 'tenant_id': {'required': True}, - 'thumbprint': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'authority_url': {'key': 'authorityUrl', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'resource_url': {'key': 'resourceUrl', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'CertificateDatastoreSecrets'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'thumbprint': {'key': 'thumbprint', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword authority_url: Authority URL used for authentication. - :paramtype authority_url: str - :keyword client_id: Required. [Required] Service principal client ID. - :paramtype client_id: str - :keyword resource_url: Resource the service principal has access to. - :paramtype resource_url: str - :keyword secrets: Required. [Required] Service principal secrets. - :paramtype secrets: ~azure.mgmt.machinelearningservices.models.CertificateDatastoreSecrets - :keyword tenant_id: Required. [Required] ID of the tenant to which the service principal - belongs. - :paramtype tenant_id: str - :keyword thumbprint: Required. [Required] Thumbprint of the certificate used for - authentication. 
- :paramtype thumbprint: str - """ - super(CertificateDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'Certificate' # type: str - self.authority_url = kwargs.get('authority_url', None) - self.client_id = kwargs['client_id'] - self.resource_url = kwargs.get('resource_url', None) - self.secrets = kwargs['secrets'] - self.tenant_id = kwargs['tenant_id'] - self.thumbprint = kwargs['thumbprint'] - - -class CertificateDatastoreSecrets(DatastoreSecrets): - """Datastore certificate secrets. - - All required parameters must be populated in order to send to Azure. - - :ivar secrets_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", "Sas", - "ServicePrincipal", "KerberosPassword", "KerberosKeytab". - :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType - :ivar certificate: Service principal certificate. - :vartype certificate: str - """ - - _validation = { - 'secrets_type': {'required': True}, - } - - _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'certificate': {'key': 'certificate', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword certificate: Service principal certificate. - :paramtype certificate: str - """ - super(CertificateDatastoreSecrets, self).__init__(**kwargs) - self.secrets_type = 'Certificate' # type: str - self.certificate = kwargs.get('certificate', None) - - -class TableVertical(msrest.serialization.Model): - """Abstract class for AutoML tasks that use table dataset as input - such as Classification/Regression/Forecasting. - - :ivar cv_split_column_names: Columns to use for CVSplit data. - :vartype cv_split_column_names: list[str] - :ivar featurization_settings: Featurization inputs needed for AutoML job. - :vartype featurization_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings - :ivar fixed_parameters: Model/training parameters that will remain constant throughout - training. - :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters - :ivar limit_settings: Execution constraints for AutoMLJob. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings - :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset - when validation dataset is not provided. - :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] - :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings - :ivar test_data: Test data input. - :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation - purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype test_data_size: float - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. 
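# Sketch of certificate-based datastore credentials: client_id, secrets, tenant_id and
# thumbprint are required keywords. The GUIDs, thumbprint, and certificate value below
# are placeholders only.
from azure.ai.ml._restclient.v2023_08_01_preview import models

creds = models.CertificateDatastoreCredentials(
    client_id="00000000-0000-0000-0000-000000000000",
    tenant_id="00000000-0000-0000-0000-000000000000",
    thumbprint="ABCDEF1234567890ABCDEF1234567890ABCDEF12",
    secrets=models.CertificateDatastoreSecrets(certificate="<base64-encoded-certificate>"),
)
assert creds.credentials_type == "Certificate"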
- Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar weight_column_name: The name of the sample weight column. Automated ML supports a - weighted column as an input, causing rows in the data to be weighted up or down. - :vartype weight_column_name: str - """ - - _attribute_map = { - 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'TableFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, - 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, - 'search_space': {'key': 'searchSpace', 'type': '[TableParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'TableSweepSettings'}, - 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, - 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword cv_split_column_names: Columns to use for CVSplit data. - :paramtype cv_split_column_names: list[str] - :keyword featurization_settings: Featurization inputs needed for AutoML job. - :paramtype featurization_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings - :keyword fixed_parameters: Model/training parameters that will remain constant throughout - training. - :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters - :keyword limit_settings: Execution constraints for AutoMLJob. - :paramtype limit_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings - :keyword n_cross_validations: Number of cross validation folds to be applied on training - dataset - when validation dataset is not provided. - :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] - :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings - :keyword test_data: Test data input. - :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation - purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype test_data_size: float - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword weight_column_name: The name of the sample weight column. Automated ML supports a - weighted column as an input, causing rows in the data to be weighted up or down. 
- :paramtype weight_column_name: str - """ - super(TableVertical, self).__init__(**kwargs) - self.cv_split_column_names = kwargs.get('cv_split_column_names', None) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.n_cross_validations = kwargs.get('n_cross_validations', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.test_data = kwargs.get('test_data', None) - self.test_data_size = kwargs.get('test_data_size', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.weight_column_name = kwargs.get('weight_column_name', None) - - -class Classification(AutoMLVertical, TableVertical): - """Classification task in AutoML Table vertical. - - All required parameters must be populated in order to send to Azure. - - :ivar cv_split_column_names: Columns to use for CVSplit data. - :vartype cv_split_column_names: list[str] - :ivar featurization_settings: Featurization inputs needed for AutoML job. - :vartype featurization_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings - :ivar fixed_parameters: Model/training parameters that will remain constant throughout - training. - :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters - :ivar limit_settings: Execution constraints for AutoMLJob. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings - :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset - when validation dataset is not provided. - :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] - :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings - :ivar test_data: Test data input. - :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation - purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype test_data_size: float - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar weight_column_name: The name of the sample weight column. Automated ML supports a - weighted column as an input, causing rows in the data to be weighted up or down. - :vartype weight_column_name: str - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. 
- Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar positive_label: Positive label for binary metrics calculation. - :vartype positive_label: str - :ivar primary_metric: Primary metric for the task. Possible values include: "AUCWeighted", - "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics - :ivar training_settings: Inputs for training phase for an AutoML Job. - :vartype training_settings: - ~azure.mgmt.machinelearningservices.models.ClassificationTrainingSettings - """ - - _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, - } - - _attribute_map = { - 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'TableFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, - 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, - 'search_space': {'key': 'searchSpace', 'type': '[TableParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'TableSweepSettings'}, - 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, - 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'positive_label': {'key': 'positiveLabel', 'type': 'str'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - 'training_settings': {'key': 'trainingSettings', 'type': 'ClassificationTrainingSettings'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword cv_split_column_names: Columns to use for CVSplit data. - :paramtype cv_split_column_names: list[str] - :keyword featurization_settings: Featurization inputs needed for AutoML job. - :paramtype featurization_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings - :keyword fixed_parameters: Model/training parameters that will remain constant throughout - training. - :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters - :keyword limit_settings: Execution constraints for AutoMLJob. 
- :paramtype limit_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings - :keyword n_cross_validations: Number of cross validation folds to be applied on training - dataset - when validation dataset is not provided. - :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] - :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings - :keyword test_data: Test data input. - :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation - purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype test_data_size: float - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword weight_column_name: The name of the sample weight column. Automated ML supports a - weighted column as an input, causing rows in the data to be weighted up or down. - :paramtype weight_column_name: str - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword positive_label: Positive label for binary metrics calculation. - :paramtype positive_label: str - :keyword primary_metric: Primary metric for the task. Possible values include: "AUCWeighted", - "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted". - :paramtype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics - :keyword training_settings: Inputs for training phase for an AutoML Job. 
- :paramtype training_settings: - ~azure.mgmt.machinelearningservices.models.ClassificationTrainingSettings - """ - super(Classification, self).__init__(**kwargs) - self.cv_split_column_names = kwargs.get('cv_split_column_names', None) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.n_cross_validations = kwargs.get('n_cross_validations', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.test_data = kwargs.get('test_data', None) - self.test_data_size = kwargs.get('test_data_size', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.weight_column_name = kwargs.get('weight_column_name', None) - self.task_type = 'Classification' # type: str - self.positive_label = kwargs.get('positive_label', None) - self.primary_metric = kwargs.get('primary_metric', None) - self.training_settings = kwargs.get('training_settings', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] - - -class ModelPerformanceMetricThresholdBase(msrest.serialization.Model): - """ModelPerformanceMetricThresholdBase. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ClassificationModelPerformanceMetricThreshold, RegressionModelPerformanceMetricThreshold. - - All required parameters must be populated in order to send to Azure. - - :ivar model_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Classification", "Regression". - :vartype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType - :ivar threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - - _validation = { - 'model_type': {'required': True}, - } - - _attribute_map = { - 'model_type': {'key': 'modelType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - } - - _subtype_map = { - 'model_type': {'Classification': 'ClassificationModelPerformanceMetricThreshold', 'Regression': 'RegressionModelPerformanceMetricThreshold'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - super(ModelPerformanceMetricThresholdBase, self).__init__(**kwargs) - self.model_type = None # type: Optional[str] - self.threshold = kwargs.get('threshold', None) - - -class ClassificationModelPerformanceMetricThreshold(ModelPerformanceMetricThresholdBase): - """ClassificationModelPerformanceMetricThreshold. - - All required parameters must be populated in order to send to Azure. - - :ivar model_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Classification", "Regression". - :vartype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType - :ivar threshold: The threshold value. 
If null, a default value will be set depending on the - selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. [Required] The classification model performance to calculate. Possible - values include: "Accuracy", "Precision", "Recall". - :vartype metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationModelPerformanceMetric - """ - - _validation = { - 'model_type': {'required': True}, - 'metric': {'required': True}, - } - - _attribute_map = { - 'model_type': {'key': 'modelType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. [Required] The classification model performance to calculate. - Possible values include: "Accuracy", "Precision", "Recall". - :paramtype metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationModelPerformanceMetric - """ - super(ClassificationModelPerformanceMetricThreshold, self).__init__(**kwargs) - self.model_type = 'Classification' # type: str - self.metric = kwargs['metric'] - - -class TrainingSettings(msrest.serialization.Model): - """Training related configuration. - - :ivar enable_dnn_training: Enable recommendation of DNN models. - :vartype enable_dnn_training: bool - :ivar enable_model_explainability: Flag to turn on explainability on best model. - :vartype enable_model_explainability: bool - :ivar enable_onnx_compatible_models: Flag for enabling onnx compatible models. - :vartype enable_onnx_compatible_models: bool - :ivar enable_stack_ensemble: Enable stack ensemble run. - :vartype enable_stack_ensemble: bool - :ivar enable_vote_ensemble: Enable voting ensemble run. - :vartype enable_vote_ensemble: bool - :ivar ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model - generation, multiple fitted models from the previous child runs are downloaded. - Configure this parameter with a higher value than 300 secs, if more time is needed. - :vartype ensemble_model_download_timeout: ~datetime.timedelta - :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. - :vartype stack_ensemble_settings: - ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings - :ivar training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to - 'non-distributed' for now, however in the future may result in mixed mode or heuristics based - mode selection. Default is 'auto'. - If 'Distributed' then only distributed featurization is used and distributed algorithms are - chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". 
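# Sketch of the Classification AutoML task: training_data is the only required keyword.
# MLTableJobInput and its uri keyword are assumed from the wider model set (not shown in
# this hunk), and the datastore path is a placeholder.
from azure.ai.ml._restclient.v2023_08_01_preview import models

task = models.Classification(
    training_data=models.MLTableJobInput(
        uri="azureml://datastores/workspaceblobstore/paths/training-mltable/"
    ),
    target_column_name="label",
    primary_metric="AUCWeighted",
)
assert task.task_type == "Classification"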
- :vartype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode - """ - - _attribute_map = { - 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, - 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, - 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, - 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, - 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, - 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, - 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, - 'training_mode': {'key': 'trainingMode', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword enable_dnn_training: Enable recommendation of DNN models. - :paramtype enable_dnn_training: bool - :keyword enable_model_explainability: Flag to turn on explainability on best model. - :paramtype enable_model_explainability: bool - :keyword enable_onnx_compatible_models: Flag for enabling onnx compatible models. - :paramtype enable_onnx_compatible_models: bool - :keyword enable_stack_ensemble: Enable stack ensemble run. - :paramtype enable_stack_ensemble: bool - :keyword enable_vote_ensemble: Enable voting ensemble run. - :paramtype enable_vote_ensemble: bool - :keyword ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model - generation, multiple fitted models from the previous child runs are downloaded. - Configure this parameter with a higher value than 300 secs, if more time is needed. - :paramtype ensemble_model_download_timeout: ~datetime.timedelta - :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. - :paramtype stack_ensemble_settings: - ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings - :keyword training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to - 'non-distributed' for now, however in the future may result in mixed mode or heuristics based - mode selection. Default is 'auto'. - If 'Distributed' then only distributed featurization is used and distributed algorithms are - chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". - :paramtype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode - """ - super(TrainingSettings, self).__init__(**kwargs) - self.enable_dnn_training = kwargs.get('enable_dnn_training', False) - self.enable_model_explainability = kwargs.get('enable_model_explainability', True) - self.enable_onnx_compatible_models = kwargs.get('enable_onnx_compatible_models', False) - self.enable_stack_ensemble = kwargs.get('enable_stack_ensemble', True) - self.enable_vote_ensemble = kwargs.get('enable_vote_ensemble', True) - self.ensemble_model_download_timeout = kwargs.get('ensemble_model_download_timeout', "PT5M") - self.stack_ensemble_settings = kwargs.get('stack_ensemble_settings', None) - self.training_mode = kwargs.get('training_mode', None) - - -class ClassificationTrainingSettings(TrainingSettings): - """Classification Training related configuration. - - :ivar enable_dnn_training: Enable recommendation of DNN models. - :vartype enable_dnn_training: bool - :ivar enable_model_explainability: Flag to turn on explainability on best model. 
- :vartype enable_model_explainability: bool - :ivar enable_onnx_compatible_models: Flag for enabling onnx compatible models. - :vartype enable_onnx_compatible_models: bool - :ivar enable_stack_ensemble: Enable stack ensemble run. - :vartype enable_stack_ensemble: bool - :ivar enable_vote_ensemble: Enable voting ensemble run. - :vartype enable_vote_ensemble: bool - :ivar ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model - generation, multiple fitted models from the previous child runs are downloaded. - Configure this parameter with a higher value than 300 secs, if more time is needed. - :vartype ensemble_model_download_timeout: ~datetime.timedelta - :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. - :vartype stack_ensemble_settings: - ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings - :ivar training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to - 'non-distributed' for now, however in the future may result in mixed mode or heuristics based - mode selection. Default is 'auto'. - If 'Distributed' then only distributed featurization is used and distributed algorithms are - chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". - :vartype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode - :ivar allowed_training_algorithms: Allowed models for classification task. - :vartype allowed_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.ClassificationModels] - :ivar blocked_training_algorithms: Blocked models for classification task. - :vartype blocked_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.ClassificationModels] - """ - - _attribute_map = { - 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, - 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, - 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, - 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, - 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, - 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, - 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, - 'training_mode': {'key': 'trainingMode', 'type': 'str'}, - 'allowed_training_algorithms': {'key': 'allowedTrainingAlgorithms', 'type': '[str]'}, - 'blocked_training_algorithms': {'key': 'blockedTrainingAlgorithms', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword enable_dnn_training: Enable recommendation of DNN models. - :paramtype enable_dnn_training: bool - :keyword enable_model_explainability: Flag to turn on explainability on best model. - :paramtype enable_model_explainability: bool - :keyword enable_onnx_compatible_models: Flag for enabling onnx compatible models. - :paramtype enable_onnx_compatible_models: bool - :keyword enable_stack_ensemble: Enable stack ensemble run. - :paramtype enable_stack_ensemble: bool - :keyword enable_vote_ensemble: Enable voting ensemble run. - :paramtype enable_vote_ensemble: bool - :keyword ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model - generation, multiple fitted models from the previous child runs are downloaded. - Configure this parameter with a higher value than 300 secs, if more time is needed. 
- :paramtype ensemble_model_download_timeout: ~datetime.timedelta - :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. - :paramtype stack_ensemble_settings: - ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings - :keyword training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to - 'non-distributed' for now, however in the future may result in mixed mode or heuristics based - mode selection. Default is 'auto'. - If 'Distributed' then only distributed featurization is used and distributed algorithms are - chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". - :paramtype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode - :keyword allowed_training_algorithms: Allowed models for classification task. - :paramtype allowed_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.ClassificationModels] - :keyword blocked_training_algorithms: Blocked models for classification task. - :paramtype blocked_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.ClassificationModels] - """ - super(ClassificationTrainingSettings, self).__init__(**kwargs) - self.allowed_training_algorithms = kwargs.get('allowed_training_algorithms', None) - self.blocked_training_algorithms = kwargs.get('blocked_training_algorithms', None) - - -class ClusterUpdateParameters(msrest.serialization.Model): - """AmlCompute update parameters. - - :ivar properties: Properties of ClusterUpdate. - :vartype properties: ~azure.mgmt.machinelearningservices.models.ScaleSettingsInformation - """ - - _attribute_map = { - 'properties': {'key': 'properties.properties', 'type': 'ScaleSettingsInformation'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Properties of ClusterUpdate. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.ScaleSettingsInformation - """ - super(ClusterUpdateParameters, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class ExportSummary(msrest.serialization.Model): - """ExportSummary. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CsvExportSummary, CocoExportSummary, DatasetExportSummary. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar end_date_time: The time when the export was completed. - :vartype end_date_time: ~datetime.datetime - :ivar exported_row_count: The total number of labeled datapoints exported. - :vartype exported_row_count: long - :ivar format: Required. [Required] The format of exported labels, also as the - discriminator.Constant filled by server. Possible values include: "Dataset", "Coco", "CSV". - :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType - :ivar labeling_job_id: Name and identifier of the job containing exported labels. - :vartype labeling_job_id: str - :ivar start_date_time: The time when the export was requested. 
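# Sketch of ClassificationTrainingSettings: the ensemble/ONNX flags come from the
# TrainingSettings base class and allowed_training_algorithms takes ClassificationModels
# values; "LightGBM" and "XGBoostClassifier" are assumed enum values.
import datetime

from azure.ai.ml._restclient.v2023_08_01_preview import models

training = models.ClassificationTrainingSettings(
    enable_onnx_compatible_models=True,
    ensemble_model_download_timeout=datetime.timedelta(minutes=10),
    allowed_training_algorithms=["LightGBM", "XGBoostClassifier"],
)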
- :vartype start_date_time: ~datetime.datetime - """ - - _validation = { - 'end_date_time': {'readonly': True}, - 'exported_row_count': {'readonly': True}, - 'format': {'required': True}, - 'labeling_job_id': {'readonly': True}, - 'start_date_time': {'readonly': True}, - } - - _attribute_map = { - 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, - 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, - 'format': {'key': 'format', 'type': 'str'}, - 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, - 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, - } - - _subtype_map = { - 'format': {'CSV': 'CsvExportSummary', 'Coco': 'CocoExportSummary', 'Dataset': 'DatasetExportSummary'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ExportSummary, self).__init__(**kwargs) - self.end_date_time = None - self.exported_row_count = None - self.format = None # type: Optional[str] - self.labeling_job_id = None - self.start_date_time = None - - -class CocoExportSummary(ExportSummary): - """CocoExportSummary. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar end_date_time: The time when the export was completed. - :vartype end_date_time: ~datetime.datetime - :ivar exported_row_count: The total number of labeled datapoints exported. - :vartype exported_row_count: long - :ivar format: Required. [Required] The format of exported labels, also as the - discriminator.Constant filled by server. Possible values include: "Dataset", "Coco", "CSV". - :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType - :ivar labeling_job_id: Name and identifier of the job containing exported labels. - :vartype labeling_job_id: str - :ivar start_date_time: The time when the export was requested. - :vartype start_date_time: ~datetime.datetime - :ivar container_name: The container name to which the labels will be exported. - :vartype container_name: str - :ivar snapshot_path: The output path where the labels will be exported. - :vartype snapshot_path: str - """ - - _validation = { - 'end_date_time': {'readonly': True}, - 'exported_row_count': {'readonly': True}, - 'format': {'required': True}, - 'labeling_job_id': {'readonly': True}, - 'start_date_time': {'readonly': True}, - 'container_name': {'readonly': True}, - 'snapshot_path': {'readonly': True}, - } - - _attribute_map = { - 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, - 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, - 'format': {'key': 'format', 'type': 'str'}, - 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, - 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, - 'container_name': {'key': 'containerName', 'type': 'str'}, - 'snapshot_path': {'key': 'snapshotPath', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(CocoExportSummary, self).__init__(**kwargs) - self.format = 'Coco' # type: str - self.container_name = None - self.snapshot_path = None - - -class CodeConfiguration(msrest.serialization.Model): - """Configuration for a scoring code asset. - - All required parameters must be populated in order to send to Azure. - - :ivar code_id: ARM resource ID of the code asset. - :vartype code_id: str - :ivar scoring_script: Required. [Required] The script to execute on startup. eg. "score.py". 
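# Sketch of discriminator-driven deserialization: ExportSummary maps the "format" value
# to a subclass through _subtype_map, so a Coco payload comes back as CocoExportSummary.
# Assumes the msrest-style Model.deserialize classmethod; the payload is a minimal
# hypothetical response fragment.
from azure.ai.ml._restclient.v2023_08_01_preview import models

summary = models.ExportSummary.deserialize({"format": "Coco", "exportedRowCount": 120})
assert isinstance(summary, models.CocoExportSummary)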
- :vartype scoring_script: str - """ - - _validation = { - 'scoring_script': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'code_id': {'key': 'codeId', 'type': 'str'}, - 'scoring_script': {'key': 'scoringScript', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword code_id: ARM resource ID of the code asset. - :paramtype code_id: str - :keyword scoring_script: Required. [Required] The script to execute on startup. eg. "score.py". - :paramtype scoring_script: str - """ - super(CodeConfiguration, self).__init__(**kwargs) - self.code_id = kwargs.get('code_id', None) - self.scoring_script = kwargs['scoring_script'] - - -class CodeContainer(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.CodeContainerProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'CodeContainerProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.CodeContainerProperties - """ - super(CodeContainer, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class CodeContainerProperties(AssetContainer): - """Container for code asset versions. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar latest_version: The latest version inside this container. - :vartype latest_version: str - :ivar next_version: The next auto incremental version. - :vartype next_version: str - :ivar provisioning_state: Provisioning state for the code container. Possible values include: - "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". 
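# Sketch of CodeConfiguration: scoring_script is required, code_id is an optional ARM
# resource ID (truncated placeholder below).
from azure.ai.ml._restclient.v2023_08_01_preview import models

code_config = models.CodeConfiguration(
    code_id="/subscriptions/.../codes/my-code/versions/1",
    scoring_script="score.py",
)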
- :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState - """ - - _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - """ - super(CodeContainerProperties, self).__init__(**kwargs) - self.provisioning_state = None - - -class CodeContainerResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of CodeContainer entities. - - :ivar next_link: The link to the next page of CodeContainer objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type CodeContainer. - :vartype value: list[~azure.mgmt.machinelearningservices.models.CodeContainer] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[CodeContainer]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of CodeContainer objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type CodeContainer. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.CodeContainer] - """ - super(CodeContainerResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class CodeVersion(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. 
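# Sketch of the resource-envelope pattern: the ARM Resource fields (id, name, type,
# system_data) are read-only, so only the required properties payload is supplied.
from azure.ai.ml._restclient.v2023_08_01_preview import models

container = models.CodeContainer(
    properties=models.CodeContainerProperties(
        description="Scoring code for the batch endpoint",
        tags={"team": "ml-platform"},
    )
)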
- :vartype properties: ~azure.mgmt.machinelearningservices.models.CodeVersionProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'CodeVersionProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.CodeVersionProperties - """ - super(CodeVersion, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class CodeVersionProperties(AssetBase): - """Code asset version details. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :vartype is_archived: bool - :ivar code_uri: Uri where code is located. - :vartype code_uri: str - :ivar provisioning_state: Provisioning state for the code version. Possible values include: - "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState - """ - - _validation = { - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'code_uri': {'key': 'codeUri', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. 
- :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :paramtype is_archived: bool - :keyword code_uri: Uri where code is located. - :paramtype code_uri: str - """ - super(CodeVersionProperties, self).__init__(**kwargs) - self.code_uri = kwargs.get('code_uri', None) - self.provisioning_state = None - - -class CodeVersionResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of CodeVersion entities. - - :ivar next_link: The link to the next page of CodeVersion objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type CodeVersion. - :vartype value: list[~azure.mgmt.machinelearningservices.models.CodeVersion] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[CodeVersion]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of CodeVersion objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type CodeVersion. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.CodeVersion] - """ - super(CodeVersionResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class Collection(msrest.serialization.Model): - """Collection. - - :ivar client_id: The msi client id used to collect logging to blob storage. If it's - null,backend will pick a registered endpoint identity to auth. - :vartype client_id: str - :ivar data_collection_mode: Enable or disable data collection. Possible values include: - "Enabled", "Disabled". - :vartype data_collection_mode: str or - ~azure.mgmt.machinelearningservices.models.DataCollectionMode - :ivar data_id: The data asset arm resource id. Client side will ensure data asset is pointing - to the blob storage, and backend will collect data to the blob storage. - :vartype data_id: str - :ivar sampling_rate: The sampling rate for collection. Sampling rate 1.0 means we collect 100% - of data by default. - :vartype sampling_rate: float - """ - - _attribute_map = { - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'data_collection_mode': {'key': 'dataCollectionMode', 'type': 'str'}, - 'data_id': {'key': 'dataId', 'type': 'str'}, - 'sampling_rate': {'key': 'samplingRate', 'type': 'float'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword client_id: The msi client id used to collect logging to blob storage. If it's - null,backend will pick a registered endpoint identity to auth. - :paramtype client_id: str - :keyword data_collection_mode: Enable or disable data collection. Possible values include: - "Enabled", "Disabled". - :paramtype data_collection_mode: str or - ~azure.mgmt.machinelearningservices.models.DataCollectionMode - :keyword data_id: The data asset arm resource id. Client side will ensure data asset is - pointing to the blob storage, and backend will collect data to the blob storage. - :paramtype data_id: str - :keyword sampling_rate: The sampling rate for collection. Sampling rate 1.0 means we collect - 100% of data by default. 
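For context on how the kwargs-based code-asset models removed above were constructed, a minimal sketch follows. The import path, the placeholder ARM ID, and the field values are assumptions for illustration only; the keyword names mirror the attribute maps in the deleted _models.py.

# Illustrative sketch only; the import path is an assumption based on this package's layout.
from azure.ai.ml._restclient.v2023_08_01_preview import models

# CodeConfiguration requires a non-empty scoring_script; code_id is optional.
code_config = models.CodeConfiguration(
    code_id="<hypothetical code-version ARM ID>",
    scoring_script="score.py",
)

# CodeContainer wraps a required CodeContainerProperties; latest_version,
# next_version and provisioning_state are read-only and populated by the server.
container = models.CodeContainer(
    properties=models.CodeContainerProperties(
        description="Scoring scripts",
        tags={"team": "ml"},
        is_archived=False,
    )
)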
- :paramtype sampling_rate: float - """ - super(Collection, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.data_collection_mode = kwargs.get('data_collection_mode', None) - self.data_id = kwargs.get('data_id', None) - self.sampling_rate = kwargs.get('sampling_rate', 1) - - -class ColumnTransformer(msrest.serialization.Model): - """Column transformer parameters. - - :ivar fields: Fields to apply transformer logic on. - :vartype fields: list[str] - :ivar parameters: Different properties to be passed to transformer. - Input expected is dictionary of key,value pairs in JSON format. - :vartype parameters: any - """ - - _attribute_map = { - 'fields': {'key': 'fields', 'type': '[str]'}, - 'parameters': {'key': 'parameters', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword fields: Fields to apply transformer logic on. - :paramtype fields: list[str] - :keyword parameters: Different properties to be passed to transformer. - Input expected is dictionary of key,value pairs in JSON format. - :paramtype parameters: any - """ - super(ColumnTransformer, self).__init__(**kwargs) - self.fields = kwargs.get('fields', None) - self.parameters = kwargs.get('parameters', None) - - -class CommandJob(JobBaseProperties): - """Command job definition. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar component_id: ARM resource ID of the component resource. - :vartype component_id: str - :ivar compute_id: ARM resource ID of the compute resource. - :vartype compute_id: str - :ivar display_name: Display name of job. - :vartype display_name: str - :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. - :vartype experiment_name: str - :ivar identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. - :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". - :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType - :ivar notification_setting: Notification setting for the job. - :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :ivar secrets_configuration: Configuration for secrets to be made available during runtime. - :vartype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :ivar services: List of JobEndpoints. - For local jobs, a job endpoint will have an endpoint value of FileStreamObject. - :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. 
Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus - :ivar autologger_settings: Distribution configuration of the job. If set, this should be one of - Mpi, Tensorflow, PyTorch, or null. - :vartype autologger_settings: ~azure.mgmt.machinelearningservices.models.AutologgerSettings - :ivar code_id: ARM resource ID of the code asset. - :vartype code_id: str - :ivar command: Required. [Required] The command to execute on startup of the job. eg. "python - train.py". - :vartype command: str - :ivar distribution: Distribution configuration of the job. If set, this should be one of Mpi, - Tensorflow, PyTorch, Ray, or null. - :vartype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration - :ivar environment_id: Required. [Required] The ARM resource ID of the Environment specification - for the job. - :vartype environment_id: str - :ivar environment_variables: Environment variables included in the job. - :vartype environment_variables: dict[str, str] - :ivar inputs: Mapping of input data bindings used in the job. - :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] - :ivar limits: Command Job limit. - :vartype limits: ~azure.mgmt.machinelearningservices.models.CommandJobLimits - :ivar outputs: Mapping of output data bindings used in the job. - :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] - :ivar parameters: Input parameters. - :vartype parameters: any - :ivar queue_settings: Queue settings for the job. - :vartype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings - :ivar resources: Compute Resource configuration for the job. 
- :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration - """ - - _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'environment_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'parameters': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'autologger_settings': {'key': 'autologgerSettings', 'type': 'AutologgerSettings'}, - 'code_id': {'key': 'codeId', 'type': 'str'}, - 'command': {'key': 'command', 'type': 'str'}, - 'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'limits': {'key': 'limits', 'type': 'CommandJobLimits'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'parameters': {'key': 'parameters', 'type': 'object'}, - 'queue_settings': {'key': 'queueSettings', 'type': 'QueueSettings'}, - 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword component_id: ARM resource ID of the component resource. - :paramtype component_id: str - :keyword compute_id: ARM resource ID of the compute resource. - :paramtype compute_id: str - :keyword display_name: Display name of job. - :paramtype display_name: str - :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. - :paramtype experiment_name: str - :keyword identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. - :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - :keyword notification_setting: Notification setting for the job. - :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :keyword secrets_configuration: Configuration for secrets to be made available during runtime. - :paramtype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :keyword services: List of JobEndpoints. 
- For local jobs, a job endpoint will have an endpoint value of FileStreamObject. - :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :keyword autologger_settings: Distribution configuration of the job. If set, this should be one - of Mpi, Tensorflow, PyTorch, or null. - :paramtype autologger_settings: ~azure.mgmt.machinelearningservices.models.AutologgerSettings - :keyword code_id: ARM resource ID of the code asset. - :paramtype code_id: str - :keyword command: Required. [Required] The command to execute on startup of the job. eg. - "python train.py". - :paramtype command: str - :keyword distribution: Distribution configuration of the job. If set, this should be one of - Mpi, Tensorflow, PyTorch, Ray, or null. - :paramtype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration - :keyword environment_id: Required. [Required] The ARM resource ID of the Environment - specification for the job. - :paramtype environment_id: str - :keyword environment_variables: Environment variables included in the job. - :paramtype environment_variables: dict[str, str] - :keyword inputs: Mapping of input data bindings used in the job. - :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] - :keyword limits: Command Job limit. - :paramtype limits: ~azure.mgmt.machinelearningservices.models.CommandJobLimits - :keyword outputs: Mapping of output data bindings used in the job. - :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] - :keyword queue_settings: Queue settings for the job. - :paramtype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings - :keyword resources: Compute Resource configuration for the job. - :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration - """ - super(CommandJob, self).__init__(**kwargs) - self.job_type = 'Command' # type: str - self.autologger_settings = kwargs.get('autologger_settings', None) - self.code_id = kwargs.get('code_id', None) - self.command = kwargs['command'] - self.distribution = kwargs.get('distribution', None) - self.environment_id = kwargs['environment_id'] - self.environment_variables = kwargs.get('environment_variables', None) - self.inputs = kwargs.get('inputs', None) - self.limits = kwargs.get('limits', None) - self.outputs = kwargs.get('outputs', None) - self.parameters = None - self.queue_settings = kwargs.get('queue_settings', None) - self.resources = kwargs.get('resources', None) - - -class JobLimits(msrest.serialization.Model): - """JobLimits. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CommandJobLimits, SweepJobLimits. - - All required parameters must be populated in order to send to Azure. - - :ivar job_limits_type: Required. [Required] JobLimit type.Constant filled by server. Possible - values include: "Command", "Sweep". - :vartype job_limits_type: str or ~azure.mgmt.machinelearningservices.models.JobLimitsType - :ivar timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. - Only supports duration with precision as low as Seconds. 
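CommandJob is the largest of the polymorphic job types deleted in this hunk; a short sketch of populating it through the old kwargs-based constructor is shown below. The environment ID and timeout are illustrative assumptions; the required fields (command, environment_id) raise KeyError if omitted, and the job_type discriminator is filled in by the subclass itself.

# Illustrative sketch only; import path assumed from this package's layout.
import datetime
from azure.ai.ml._restclient.v2023_08_01_preview import models

job = models.CommandJob(
    display_name="train-run",
    experiment_name="my-experiment",
    command="python train.py",                            # required, non-empty
    environment_id="<hypothetical environment ARM ID>",   # required
    environment_variables={"MY_FLAG": "1"},
    limits=models.CommandJobLimits(timeout=datetime.timedelta(hours=2)),
)
assert job.job_type == "Command"   # constant set in CommandJob.__init__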
- :vartype timeout: ~datetime.timedelta - """ - - _validation = { - 'job_limits_type': {'required': True}, - } - - _attribute_map = { - 'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, - } - - _subtype_map = { - 'job_limits_type': {'Command': 'CommandJobLimits', 'Sweep': 'SweepJobLimits'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword timeout: The max run duration in ISO 8601 format, after which the job will be - cancelled. Only supports duration with precision as low as Seconds. - :paramtype timeout: ~datetime.timedelta - """ - super(JobLimits, self).__init__(**kwargs) - self.job_limits_type = None # type: Optional[str] - self.timeout = kwargs.get('timeout', None) - - -class CommandJobLimits(JobLimits): - """Command Job limit class. - - All required parameters must be populated in order to send to Azure. - - :ivar job_limits_type: Required. [Required] JobLimit type.Constant filled by server. Possible - values include: "Command", "Sweep". - :vartype job_limits_type: str or ~azure.mgmt.machinelearningservices.models.JobLimitsType - :ivar timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. - Only supports duration with precision as low as Seconds. - :vartype timeout: ~datetime.timedelta - """ - - _validation = { - 'job_limits_type': {'required': True}, - } - - _attribute_map = { - 'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword timeout: The max run duration in ISO 8601 format, after which the job will be - cancelled. Only supports duration with precision as low as Seconds. - :paramtype timeout: ~datetime.timedelta - """ - super(CommandJobLimits, self).__init__(**kwargs) - self.job_limits_type = 'Command' # type: str - - -class ComponentConfiguration(msrest.serialization.Model): - """Used for sweep over component. - - :ivar pipeline_settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. - :vartype pipeline_settings: any - """ - - _attribute_map = { - 'pipeline_settings': {'key': 'pipelineSettings', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword pipeline_settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. - :paramtype pipeline_settings: any - """ - super(ComponentConfiguration, self).__init__(**kwargs) - self.pipeline_settings = kwargs.get('pipeline_settings', None) - - -class ComponentContainer(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. 
- :vartype properties: ~azure.mgmt.machinelearningservices.models.ComponentContainerProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ComponentContainerProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComponentContainerProperties - """ - super(ComponentContainer, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class ComponentContainerProperties(AssetContainer): - """Component container definition. - - -.. raw:: html - - . - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar latest_version: The latest version inside this container. - :vartype latest_version: str - :ivar next_version: The next auto incremental version. - :vartype next_version: str - :ivar provisioning_state: Provisioning state for the component container. Possible values - include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState - """ - - _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - """ - super(ComponentContainerProperties, self).__init__(**kwargs) - self.provisioning_state = None - - -class ComponentContainerResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of ComponentContainer entities. - - :ivar next_link: The link to the next page of ComponentContainer objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type ComponentContainer. 
- :vartype value: list[~azure.mgmt.machinelearningservices.models.ComponentContainer] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[ComponentContainer]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of ComponentContainer objects. If null, there are - no additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type ComponentContainer. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.ComponentContainer] - """ - super(ComponentContainerResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class ComponentVersion(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.ComponentVersionProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ComponentVersionProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComponentVersionProperties - """ - super(ComponentVersion, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class ComponentVersionProperties(AssetBase): - """Definition of a component version: defines resources that span component types. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived? 
For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :vartype is_archived: bool - :ivar component_spec: Defines Component definition details. - - - .. raw:: html - - . - :vartype component_spec: any - :ivar provisioning_state: Provisioning state for the component version. Possible values - include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState - :ivar stage: Stage in the component lifecycle. - :vartype stage: str - """ - - _validation = { - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'component_spec': {'key': 'componentSpec', 'type': 'object'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'stage': {'key': 'stage', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :paramtype is_archived: bool - :keyword component_spec: Defines Component definition details. - - - .. raw:: html - - . - :paramtype component_spec: any - :keyword stage: Stage in the component lifecycle. - :paramtype stage: str - """ - super(ComponentVersionProperties, self).__init__(**kwargs) - self.component_spec = kwargs.get('component_spec', None) - self.provisioning_state = None - self.stage = kwargs.get('stage', None) - - -class ComponentVersionResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of ComponentVersion entities. - - :ivar next_link: The link to the next page of ComponentVersion objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type ComponentVersion. - :vartype value: list[~azure.mgmt.machinelearningservices.models.ComponentVersion] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[ComponentVersion]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of ComponentVersion objects. If null, there are - no additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type ComponentVersion. 
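Because component_spec above is typed as a free-form object, the removed ComponentVersionProperties accepted an arbitrary dictionary. A hedged sketch, with the spec payload and import path as assumptions rather than a documented schema:

# Illustrative sketch only; component_spec is untyped ('object' in the attribute map).
from azure.ai.ml._restclient.v2023_08_01_preview import models

component = models.ComponentVersion(
    properties=models.ComponentVersionProperties(
        description="Preprocessing step",
        tags={"stage": "dev"},
        component_spec={                      # hypothetical payload
            "command": "python prep.py",
            "inputs": {"raw": {"type": "uri_folder"}},
        },
        stage="Development",
    )
)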
- :paramtype value: list[~azure.mgmt.machinelearningservices.models.ComponentVersion] - """ - super(ComponentVersionResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class ComputeInstanceSchema(msrest.serialization.Model): - """Properties(top level) of ComputeInstance. - - :ivar properties: Properties of ComputeInstance. - :vartype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties - """ - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Properties of ComputeInstance. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties - """ - super(ComputeInstanceSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class ComputeInstance(Compute, ComputeInstanceSchema): - """An Azure Machine Learning compute instance. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar properties: Properties of ComputeInstance. - :vartype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar compute_location: Location for the underlying compute. - :vartype compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar description: The description of the Machine Learning compute. - :vartype description: str - :ivar created_on: The time at which the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The time at which the compute was last modified. - :vartype modified_on: ~datetime.datetime - :ivar resource_id: ARM resource id of the underlying compute. - :vartype resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI - and AAD exclusively for authentication. 
- :vartype disable_local_auth: bool - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Properties of ComputeInstance. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties - :keyword compute_location: Location for the underlying compute. - :paramtype compute_location: str - :keyword description: The description of the Machine Learning compute. - :paramtype description: str - :keyword resource_id: ARM resource id of the underlying compute. - :paramtype resource_id: str - :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only - MSI and AAD exclusively for authentication. - :paramtype disable_local_auth: bool - """ - super(ComputeInstance, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'ComputeInstance' # type: str - self.compute_location = kwargs.get('compute_location', None) - self.provisioning_state = None - self.description = kwargs.get('description', None) - self.created_on = None - self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) - self.provisioning_errors = None - self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) - - -class ComputeInstanceApplication(msrest.serialization.Model): - """Defines an Aml Instance application and its connectivity endpoint URI. - - :ivar display_name: Name of the ComputeInstance application. - :vartype display_name: str - :ivar endpoint_uri: Application' endpoint URI. - :vartype endpoint_uri: str - """ - - _attribute_map = { - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword display_name: Name of the ComputeInstance application. - :paramtype display_name: str - :keyword endpoint_uri: Application' endpoint URI. - :paramtype endpoint_uri: str - """ - super(ComputeInstanceApplication, self).__init__(**kwargs) - self.display_name = kwargs.get('display_name', None) - self.endpoint_uri = kwargs.get('endpoint_uri', None) - - -class ComputeInstanceAutologgerSettings(msrest.serialization.Model): - """Specifies settings for autologger. - - :ivar mlflow_autologger: Indicates whether mlflow autologger is enabled for notebooks. Possible - values include: "Enabled", "Disabled". 
- :vartype mlflow_autologger: str or ~azure.mgmt.machinelearningservices.models.MlflowAutologger - """ - - _attribute_map = { - 'mlflow_autologger': {'key': 'mlflowAutologger', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mlflow_autologger: Indicates whether mlflow autologger is enabled for notebooks. - Possible values include: "Enabled", "Disabled". - :paramtype mlflow_autologger: str or - ~azure.mgmt.machinelearningservices.models.MlflowAutologger - """ - super(ComputeInstanceAutologgerSettings, self).__init__(**kwargs) - self.mlflow_autologger = kwargs.get('mlflow_autologger', None) - - -class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model): - """Defines all connectivity endpoints and properties for an ComputeInstance. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar public_ip_address: Public IP Address of this ComputeInstance. - :vartype public_ip_address: str - :ivar private_ip_address: Private IP Address of this ComputeInstance (local to the VNET in - which the compute instance is deployed). - :vartype private_ip_address: str - """ - - _validation = { - 'public_ip_address': {'readonly': True}, - 'private_ip_address': {'readonly': True}, - } - - _attribute_map = { - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs) - self.public_ip_address = None - self.private_ip_address = None - - -class ComputeInstanceContainer(msrest.serialization.Model): - """Defines an Aml Instance container. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: Name of the ComputeInstance container. - :vartype name: str - :ivar autosave: Auto save settings. Possible values include: "None", "Local", "Remote". - :vartype autosave: str or ~azure.mgmt.machinelearningservices.models.Autosave - :ivar gpu: Information of GPU. - :vartype gpu: str - :ivar network: network of this container. Possible values include: "Bridge", "Host". - :vartype network: str or ~azure.mgmt.machinelearningservices.models.Network - :ivar environment: Environment information of this container. - :vartype environment: ~azure.mgmt.machinelearningservices.models.ComputeInstanceEnvironmentInfo - :ivar services: services of this containers. - :vartype services: list[any] - """ - - _validation = { - 'services': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'autosave': {'key': 'autosave', 'type': 'str'}, - 'gpu': {'key': 'gpu', 'type': 'str'}, - 'network': {'key': 'network', 'type': 'str'}, - 'environment': {'key': 'environment', 'type': 'ComputeInstanceEnvironmentInfo'}, - 'services': {'key': 'services', 'type': '[object]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword name: Name of the ComputeInstance container. - :paramtype name: str - :keyword autosave: Auto save settings. Possible values include: "None", "Local", "Remote". - :paramtype autosave: str or ~azure.mgmt.machinelearningservices.models.Autosave - :keyword gpu: Information of GPU. - :paramtype gpu: str - :keyword network: network of this container. Possible values include: "Bridge", "Host". - :paramtype network: str or ~azure.mgmt.machinelearningservices.models.Network - :keyword environment: Environment information of this container. 
- :paramtype environment: - ~azure.mgmt.machinelearningservices.models.ComputeInstanceEnvironmentInfo - """ - super(ComputeInstanceContainer, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.autosave = kwargs.get('autosave', None) - self.gpu = kwargs.get('gpu', None) - self.network = kwargs.get('network', None) - self.environment = kwargs.get('environment', None) - self.services = None - - -class ComputeInstanceCreatedBy(msrest.serialization.Model): - """Describes information on user who created this ComputeInstance. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar user_name: Name of the user. - :vartype user_name: str - :ivar user_org_id: Uniquely identifies user' Azure Active Directory organization. - :vartype user_org_id: str - :ivar user_id: Uniquely identifies the user within his/her organization. - :vartype user_id: str - """ - - _validation = { - 'user_name': {'readonly': True}, - 'user_org_id': {'readonly': True}, - 'user_id': {'readonly': True}, - } - - _attribute_map = { - 'user_name': {'key': 'userName', 'type': 'str'}, - 'user_org_id': {'key': 'userOrgId', 'type': 'str'}, - 'user_id': {'key': 'userId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ComputeInstanceCreatedBy, self).__init__(**kwargs) - self.user_name = None - self.user_org_id = None - self.user_id = None - - -class ComputeInstanceDataDisk(msrest.serialization.Model): - """Defines an Aml Instance DataDisk. - - :ivar caching: Caching type of Data Disk. Possible values include: "None", "ReadOnly", - "ReadWrite". - :vartype caching: str or ~azure.mgmt.machinelearningservices.models.Caching - :ivar disk_size_gb: The initial disk size in gigabytes. - :vartype disk_size_gb: int - :ivar lun: The lun is used to uniquely identify each data disk. If attaching multiple disks, - each should have a distinct lun. - :vartype lun: int - :ivar storage_account_type: type of this storage account. Possible values include: - "Standard_LRS", "Premium_LRS". Default value: "Standard_LRS". - :vartype storage_account_type: str or - ~azure.mgmt.machinelearningservices.models.StorageAccountType - """ - - _attribute_map = { - 'caching': {'key': 'caching', 'type': 'str'}, - 'disk_size_gb': {'key': 'diskSizeGB', 'type': 'int'}, - 'lun': {'key': 'lun', 'type': 'int'}, - 'storage_account_type': {'key': 'storageAccountType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword caching: Caching type of Data Disk. Possible values include: "None", "ReadOnly", - "ReadWrite". - :paramtype caching: str or ~azure.mgmt.machinelearningservices.models.Caching - :keyword disk_size_gb: The initial disk size in gigabytes. - :paramtype disk_size_gb: int - :keyword lun: The lun is used to uniquely identify each data disk. If attaching multiple disks, - each should have a distinct lun. - :paramtype lun: int - :keyword storage_account_type: type of this storage account. Possible values include: - "Standard_LRS", "Premium_LRS". Default value: "Standard_LRS". 
- :paramtype storage_account_type: str or - ~azure.mgmt.machinelearningservices.models.StorageAccountType - """ - super(ComputeInstanceDataDisk, self).__init__(**kwargs) - self.caching = kwargs.get('caching', None) - self.disk_size_gb = kwargs.get('disk_size_gb', None) - self.lun = kwargs.get('lun', None) - self.storage_account_type = kwargs.get('storage_account_type', "Standard_LRS") - - -class ComputeInstanceDataMount(msrest.serialization.Model): - """Defines an Aml Instance DataMount. - - :ivar source: Source of the ComputeInstance data mount. - :vartype source: str - :ivar source_type: Data source type. Possible values include: "Dataset", "Datastore", "URI". - :vartype source_type: str or ~azure.mgmt.machinelearningservices.models.SourceType - :ivar mount_name: name of the ComputeInstance data mount. - :vartype mount_name: str - :ivar mount_action: Mount Action. Possible values include: "Mount", "Unmount". - :vartype mount_action: str or ~azure.mgmt.machinelearningservices.models.MountAction - :ivar created_by: who this data mount created by. - :vartype created_by: str - :ivar mount_path: Path of this data mount. - :vartype mount_path: str - :ivar mount_state: Mount state. Possible values include: "MountRequested", "Mounted", - "MountFailed", "UnmountRequested", "UnmountFailed", "Unmounted". - :vartype mount_state: str or ~azure.mgmt.machinelearningservices.models.MountState - :ivar mounted_on: The time when the disk mounted. - :vartype mounted_on: ~datetime.datetime - :ivar error: Error of this data mount. - :vartype error: str - """ - - _attribute_map = { - 'source': {'key': 'source', 'type': 'str'}, - 'source_type': {'key': 'sourceType', 'type': 'str'}, - 'mount_name': {'key': 'mountName', 'type': 'str'}, - 'mount_action': {'key': 'mountAction', 'type': 'str'}, - 'created_by': {'key': 'createdBy', 'type': 'str'}, - 'mount_path': {'key': 'mountPath', 'type': 'str'}, - 'mount_state': {'key': 'mountState', 'type': 'str'}, - 'mounted_on': {'key': 'mountedOn', 'type': 'iso-8601'}, - 'error': {'key': 'error', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword source: Source of the ComputeInstance data mount. - :paramtype source: str - :keyword source_type: Data source type. Possible values include: "Dataset", "Datastore", "URI". - :paramtype source_type: str or ~azure.mgmt.machinelearningservices.models.SourceType - :keyword mount_name: name of the ComputeInstance data mount. - :paramtype mount_name: str - :keyword mount_action: Mount Action. Possible values include: "Mount", "Unmount". - :paramtype mount_action: str or ~azure.mgmt.machinelearningservices.models.MountAction - :keyword created_by: who this data mount created by. - :paramtype created_by: str - :keyword mount_path: Path of this data mount. - :paramtype mount_path: str - :keyword mount_state: Mount state. Possible values include: "MountRequested", "Mounted", - "MountFailed", "UnmountRequested", "UnmountFailed", "Unmounted". - :paramtype mount_state: str or ~azure.mgmt.machinelearningservices.models.MountState - :keyword mounted_on: The time when the disk mounted. - :paramtype mounted_on: ~datetime.datetime - :keyword error: Error of this data mount. 
- :paramtype error: str - """ - super(ComputeInstanceDataMount, self).__init__(**kwargs) - self.source = kwargs.get('source', None) - self.source_type = kwargs.get('source_type', None) - self.mount_name = kwargs.get('mount_name', None) - self.mount_action = kwargs.get('mount_action', None) - self.created_by = kwargs.get('created_by', None) - self.mount_path = kwargs.get('mount_path', None) - self.mount_state = kwargs.get('mount_state', None) - self.mounted_on = kwargs.get('mounted_on', None) - self.error = kwargs.get('error', None) - - -class ComputeInstanceEnvironmentInfo(msrest.serialization.Model): - """Environment information. - - :ivar name: name of environment. - :vartype name: str - :ivar version: version of environment. - :vartype version: str - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword name: name of environment. - :paramtype name: str - :keyword version: version of environment. - :paramtype version: str - """ - super(ComputeInstanceEnvironmentInfo, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.version = kwargs.get('version', None) - - -class ComputeInstanceLastOperation(msrest.serialization.Model): - """The last operation on ComputeInstance. - - :ivar operation_name: Name of the last operation. Possible values include: "Create", "Start", - "Stop", "Restart", "Resize", "Reimage", "Delete". - :vartype operation_name: str or ~azure.mgmt.machinelearningservices.models.OperationName - :ivar operation_time: Time of the last operation. - :vartype operation_time: ~datetime.datetime - :ivar operation_status: Operation status. Possible values include: "InProgress", "Succeeded", - "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ResizeFailed", "ReimageFailed", - "DeleteFailed". - :vartype operation_status: str or ~azure.mgmt.machinelearningservices.models.OperationStatus - :ivar operation_trigger: Trigger of operation. Possible values include: "User", "Schedule", - "IdleShutdown". - :vartype operation_trigger: str or ~azure.mgmt.machinelearningservices.models.OperationTrigger - """ - - _attribute_map = { - 'operation_name': {'key': 'operationName', 'type': 'str'}, - 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'}, - 'operation_status': {'key': 'operationStatus', 'type': 'str'}, - 'operation_trigger': {'key': 'operationTrigger', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword operation_name: Name of the last operation. Possible values include: "Create", - "Start", "Stop", "Restart", "Resize", "Reimage", "Delete". - :paramtype operation_name: str or ~azure.mgmt.machinelearningservices.models.OperationName - :keyword operation_time: Time of the last operation. - :paramtype operation_time: ~datetime.datetime - :keyword operation_status: Operation status. Possible values include: "InProgress", - "Succeeded", "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ResizeFailed", - "ReimageFailed", "DeleteFailed". - :paramtype operation_status: str or ~azure.mgmt.machinelearningservices.models.OperationStatus - :keyword operation_trigger: Trigger of operation. Possible values include: "User", "Schedule", - "IdleShutdown". 
- :paramtype operation_trigger: str or - ~azure.mgmt.machinelearningservices.models.OperationTrigger - """ - super(ComputeInstanceLastOperation, self).__init__(**kwargs) - self.operation_name = kwargs.get('operation_name', None) - self.operation_time = kwargs.get('operation_time', None) - self.operation_status = kwargs.get('operation_status', None) - self.operation_trigger = kwargs.get('operation_trigger', None) - - -class ComputeInstanceProperties(msrest.serialization.Model): - """Compute Instance properties. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar vm_size: Virtual Machine Size. - :vartype vm_size: str - :ivar subnet: Virtual network subnet resource ID the compute nodes belong to. - :vartype subnet: ~azure.mgmt.machinelearningservices.models.ResourceId - :ivar application_sharing_policy: Policy for sharing applications on this compute instance - among users of parent workspace. If Personal, only the creator can access applications on this - compute instance. When Shared, any workspace user can access applications on this instance - depending on his/her assigned role. Possible values include: "Personal", "Shared". Default - value: "Shared". - :vartype application_sharing_policy: str or - ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy - :ivar autologger_settings: Specifies settings for autologger. - :vartype autologger_settings: - ~azure.mgmt.machinelearningservices.models.ComputeInstanceAutologgerSettings - :ivar ssh_settings: Specifies policy and settings for SSH access. - :vartype ssh_settings: ~azure.mgmt.machinelearningservices.models.ComputeInstanceSshSettings - :ivar custom_services: List of Custom Services added to the compute. - :vartype custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] - :ivar os_image_metadata: Returns metadata about the operating system image for this compute - instance. - :vartype os_image_metadata: ~azure.mgmt.machinelearningservices.models.ImageMetadata - :ivar connectivity_endpoints: Describes all connectivity endpoints available for this - ComputeInstance. - :vartype connectivity_endpoints: - ~azure.mgmt.machinelearningservices.models.ComputeInstanceConnectivityEndpoints - :ivar applications: Describes available applications and their endpoints on this - ComputeInstance. - :vartype applications: - list[~azure.mgmt.machinelearningservices.models.ComputeInstanceApplication] - :ivar created_by: Describes information on user who created this ComputeInstance. - :vartype created_by: ~azure.mgmt.machinelearningservices.models.ComputeInstanceCreatedBy - :ivar errors: Collection of errors encountered on this ComputeInstance. - :vartype errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar state: The current state of this ComputeInstance. Possible values include: "Creating", - "CreateFailed", "Deleting", "Running", "Restarting", "Resizing", "JobRunning", "SettingUp", - "SetupFailed", "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", - "Unknown", "Unusable". - :vartype state: str or ~azure.mgmt.machinelearningservices.models.ComputeInstanceState - :ivar compute_instance_authorization_type: The Compute Instance Authorization type. Available - values are personal (default). Possible values include: "personal". Default value: "personal". 
- :vartype compute_instance_authorization_type: str or - ~azure.mgmt.machinelearningservices.models.ComputeInstanceAuthorizationType - :ivar enable_os_patching: Enable Auto OS Patching. Possible values are: true, false. - :vartype enable_os_patching: bool - :ivar release_quota_on_stop: Release quota if compute instance stopped. Possible values are: - true - release quota if compute instance stopped. false - don't release quota when compute - instance stopped. - :vartype release_quota_on_stop: bool - :ivar personal_compute_instance_settings: Settings for a personal compute instance. - :vartype personal_compute_instance_settings: - ~azure.mgmt.machinelearningservices.models.PersonalComputeInstanceSettings - :ivar setup_scripts: Details of customized scripts to execute for setting up the cluster. - :vartype setup_scripts: ~azure.mgmt.machinelearningservices.models.SetupScripts - :ivar last_operation: The last operation on ComputeInstance. - :vartype last_operation: - ~azure.mgmt.machinelearningservices.models.ComputeInstanceLastOperation - :ivar schedules: The list of schedules to be applied on the computes. - :vartype schedules: ~azure.mgmt.machinelearningservices.models.ComputeSchedules - :ivar idle_time_before_shutdown: Stops compute instance after user defined period of - inactivity. Time is defined in ISO8601 format. Minimum is 15 min, maximum is 3 days. - :vartype idle_time_before_shutdown: str - :ivar enable_node_public_ip: Enable or disable node public IP address provisioning. Possible - values are: Possible values are: true - Indicates that the compute nodes will have public IPs - provisioned. false - Indicates that the compute nodes will have a private endpoint and no - public IPs. - :vartype enable_node_public_ip: bool - :ivar containers: Describes informations of containers on this ComputeInstance. - :vartype containers: list[~azure.mgmt.machinelearningservices.models.ComputeInstanceContainer] - :ivar data_disks: Describes informations of dataDisks on this ComputeInstance. - :vartype data_disks: list[~azure.mgmt.machinelearningservices.models.ComputeInstanceDataDisk] - :ivar data_mounts: Describes informations of dataMounts on this ComputeInstance. - :vartype data_mounts: list[~azure.mgmt.machinelearningservices.models.ComputeInstanceDataMount] - :ivar versions: ComputeInstance version. 
- :vartype versions: ~azure.mgmt.machinelearningservices.models.ComputeInstanceVersion - """ - - _validation = { - 'os_image_metadata': {'readonly': True}, - 'connectivity_endpoints': {'readonly': True}, - 'applications': {'readonly': True}, - 'created_by': {'readonly': True}, - 'errors': {'readonly': True}, - 'state': {'readonly': True}, - 'last_operation': {'readonly': True}, - 'containers': {'readonly': True}, - 'data_disks': {'readonly': True}, - 'data_mounts': {'readonly': True}, - 'versions': {'readonly': True}, - } - - _attribute_map = { - 'vm_size': {'key': 'vmSize', 'type': 'str'}, - 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, - 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'}, - 'autologger_settings': {'key': 'autologgerSettings', 'type': 'ComputeInstanceAutologgerSettings'}, - 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'}, - 'custom_services': {'key': 'customServices', 'type': '[CustomService]'}, - 'os_image_metadata': {'key': 'osImageMetadata', 'type': 'ImageMetadata'}, - 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'}, - 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'}, - 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'}, - 'errors': {'key': 'errors', 'type': '[ErrorResponse]'}, - 'state': {'key': 'state', 'type': 'str'}, - 'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'}, - 'enable_os_patching': {'key': 'enableOSPatching', 'type': 'bool'}, - 'release_quota_on_stop': {'key': 'releaseQuotaOnStop', 'type': 'bool'}, - 'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'}, - 'setup_scripts': {'key': 'setupScripts', 'type': 'SetupScripts'}, - 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'}, - 'schedules': {'key': 'schedules', 'type': 'ComputeSchedules'}, - 'idle_time_before_shutdown': {'key': 'idleTimeBeforeShutdown', 'type': 'str'}, - 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'}, - 'containers': {'key': 'containers', 'type': '[ComputeInstanceContainer]'}, - 'data_disks': {'key': 'dataDisks', 'type': '[ComputeInstanceDataDisk]'}, - 'data_mounts': {'key': 'dataMounts', 'type': '[ComputeInstanceDataMount]'}, - 'versions': {'key': 'versions', 'type': 'ComputeInstanceVersion'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword vm_size: Virtual Machine Size. - :paramtype vm_size: str - :keyword subnet: Virtual network subnet resource ID the compute nodes belong to. - :paramtype subnet: ~azure.mgmt.machinelearningservices.models.ResourceId - :keyword application_sharing_policy: Policy for sharing applications on this compute instance - among users of parent workspace. If Personal, only the creator can access applications on this - compute instance. When Shared, any workspace user can access applications on this instance - depending on his/her assigned role. Possible values include: "Personal", "Shared". Default - value: "Shared". - :paramtype application_sharing_policy: str or - ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy - :keyword autologger_settings: Specifies settings for autologger. - :paramtype autologger_settings: - ~azure.mgmt.machinelearningservices.models.ComputeInstanceAutologgerSettings - :keyword ssh_settings: Specifies policy and settings for SSH access. 
- :paramtype ssh_settings: ~azure.mgmt.machinelearningservices.models.ComputeInstanceSshSettings
- :keyword custom_services: List of Custom Services added to the compute.
- :paramtype custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService]
- :keyword compute_instance_authorization_type: The Compute Instance Authorization type.
- Available values are personal (default). Possible values include: "personal". Default value:
- "personal".
- :paramtype compute_instance_authorization_type: str or
- ~azure.mgmt.machinelearningservices.models.ComputeInstanceAuthorizationType
- :keyword enable_os_patching: Enable Auto OS Patching. Possible values are: true, false.
- :paramtype enable_os_patching: bool
- :keyword release_quota_on_stop: Release quota if compute instance stopped. Possible values are:
- true - release quota if compute instance stopped. false - don't release quota when compute
- instance stopped.
- :paramtype release_quota_on_stop: bool
- :keyword personal_compute_instance_settings: Settings for a personal compute instance.
- :paramtype personal_compute_instance_settings:
- ~azure.mgmt.machinelearningservices.models.PersonalComputeInstanceSettings
- :keyword setup_scripts: Details of customized scripts to execute for setting up the cluster.
- :paramtype setup_scripts: ~azure.mgmt.machinelearningservices.models.SetupScripts
- :keyword schedules: The list of schedules to be applied on the computes.
- :paramtype schedules: ~azure.mgmt.machinelearningservices.models.ComputeSchedules
- :keyword idle_time_before_shutdown: Stops compute instance after user defined period of
- inactivity. Time is defined in ISO8601 format. Minimum is 15 min, maximum is 3 days.
- :paramtype idle_time_before_shutdown: str
- :keyword enable_node_public_ip: Enable or disable node public IP address provisioning. Possible
- values are: true - Indicates that the compute nodes will have public IPs provisioned. false -
- Indicates that the compute nodes will have a private endpoint and no public IPs.
- :paramtype enable_node_public_ip: bool
- """
- super(ComputeInstanceProperties, self).__init__(**kwargs)
- self.vm_size = kwargs.get('vm_size', None)
- self.subnet = kwargs.get('subnet', None)
- self.application_sharing_policy = kwargs.get('application_sharing_policy', "Shared")
- self.autologger_settings = kwargs.get('autologger_settings', None)
- self.ssh_settings = kwargs.get('ssh_settings', None)
- self.custom_services = kwargs.get('custom_services', None)
- self.os_image_metadata = None
- self.connectivity_endpoints = None
- self.applications = None
- self.created_by = None
- self.errors = None
- self.state = None
- self.compute_instance_authorization_type = kwargs.get('compute_instance_authorization_type', "personal")
- self.enable_os_patching = kwargs.get('enable_os_patching', False)
- self.release_quota_on_stop = kwargs.get('release_quota_on_stop', False)
- self.personal_compute_instance_settings = kwargs.get('personal_compute_instance_settings', None)
- self.setup_scripts = kwargs.get('setup_scripts', None)
- self.last_operation = None
- self.schedules = kwargs.get('schedules', None)
- self.idle_time_before_shutdown = kwargs.get('idle_time_before_shutdown', None)
- self.enable_node_public_ip = kwargs.get('enable_node_public_ip', True)
- self.containers = None
- self.data_disks = None
- self.data_mounts = None
- self.versions = None
-
-
- class ComputeInstanceSshSettings(msrest.serialization.Model):
- """Specifies policy and settings for SSH access.
- - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar ssh_public_access: State of the public SSH port. Possible values are: Disabled - - Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the - public ssh port is open and accessible according to the VNet/subnet policy if applicable. - Possible values include: "Enabled", "Disabled". Default value: "Disabled". - :vartype ssh_public_access: str or ~azure.mgmt.machinelearningservices.models.SshPublicAccess - :ivar admin_user_name: Describes the admin user name. - :vartype admin_user_name: str - :ivar ssh_port: Describes the port for connecting through SSH. - :vartype ssh_port: int - :ivar admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t - rsa -b 2048" to generate your SSH key pairs. - :vartype admin_public_key: str - """ - - _validation = { - 'admin_user_name': {'readonly': True}, - 'ssh_port': {'readonly': True}, - } - - _attribute_map = { - 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'}, - 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword ssh_public_access: State of the public SSH port. Possible values are: Disabled - - Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the - public ssh port is open and accessible according to the VNet/subnet policy if applicable. - Possible values include: "Enabled", "Disabled". Default value: "Disabled". - :paramtype ssh_public_access: str or ~azure.mgmt.machinelearningservices.models.SshPublicAccess - :keyword admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen - -t rsa -b 2048" to generate your SSH key pairs. - :paramtype admin_public_key: str - """ - super(ComputeInstanceSshSettings, self).__init__(**kwargs) - self.ssh_public_access = kwargs.get('ssh_public_access', "Disabled") - self.admin_user_name = None - self.ssh_port = None - self.admin_public_key = kwargs.get('admin_public_key', None) - - -class ComputeInstanceVersion(msrest.serialization.Model): - """Version of computeInstance. - - :ivar runtime: Runtime of compute instance. - :vartype runtime: str - """ - - _attribute_map = { - 'runtime': {'key': 'runtime', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword runtime: Runtime of compute instance. - :paramtype runtime: str - """ - super(ComputeInstanceVersion, self).__init__(**kwargs) - self.runtime = kwargs.get('runtime', None) - - -class ComputeResourceSchema(msrest.serialization.Model): - """ComputeResourceSchema. - - :ivar properties: Compute properties. - :vartype properties: ~azure.mgmt.machinelearningservices.models.Compute - """ - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'Compute'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Compute properties. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.Compute - """ - super(ComputeResourceSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class ComputeResource(Resource, ComputeResourceSchema): - """Machine Learning compute object wrapped into ARM resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar properties: Compute properties. 
- :vartype properties: ~azure.mgmt.machinelearningservices.models.Compute - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar identity: The identity of the resource. - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar location: Specifies the location of the resource. - :vartype location: str - :ivar tags: A set of tags. Contains resource tags defined as key/value pairs. - :vartype tags: dict[str, str] - :ivar sku: The sku of the workspace. - :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - } - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'Compute'}, - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Compute properties. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.Compute - :keyword identity: The identity of the resource. - :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword location: Specifies the location of the resource. - :paramtype location: str - :keyword tags: A set of tags. Contains resource tags defined as key/value pairs. - :paramtype tags: dict[str, str] - :keyword sku: The sku of the workspace. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - """ - super(ComputeResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.identity = kwargs.get('identity', None) - self.location = kwargs.get('location', None) - self.tags = kwargs.get('tags', None) - self.sku = kwargs.get('sku', None) - self.id = None - self.name = None - self.type = None - self.system_data = None - - -class ComputeRuntimeDto(msrest.serialization.Model): - """ComputeRuntimeDto. - - :ivar spark_runtime_version: - :vartype spark_runtime_version: str - """ - - _attribute_map = { - 'spark_runtime_version': {'key': 'sparkRuntimeVersion', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword spark_runtime_version: - :paramtype spark_runtime_version: str - """ - super(ComputeRuntimeDto, self).__init__(**kwargs) - self.spark_runtime_version = kwargs.get('spark_runtime_version', None) - - -class ComputeSchedules(msrest.serialization.Model): - """The list of schedules to be applied on the computes. - - :ivar compute_start_stop: The list of compute start stop schedules to be applied. 
- :vartype compute_start_stop: - list[~azure.mgmt.machinelearningservices.models.ComputeStartStopSchedule] - """ - - _attribute_map = { - 'compute_start_stop': {'key': 'computeStartStop', 'type': '[ComputeStartStopSchedule]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword compute_start_stop: The list of compute start stop schedules to be applied. - :paramtype compute_start_stop: - list[~azure.mgmt.machinelearningservices.models.ComputeStartStopSchedule] - """ - super(ComputeSchedules, self).__init__(**kwargs) - self.compute_start_stop = kwargs.get('compute_start_stop', None) - - -class ComputeStartStopSchedule(msrest.serialization.Model): - """Compute start stop schedule properties. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: A system assigned id for the schedule. - :vartype id: str - :ivar provisioning_status: The current deployment state of schedule. Possible values include: - "Completed", "Provisioning", "Failed". - :vartype provisioning_status: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningStatus - :ivar status: Is the schedule enabled or disabled?. Possible values include: "Enabled", - "Disabled". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus - :ivar action: [Required] The compute power action. Possible values include: "Start", "Stop". - :vartype action: str or ~azure.mgmt.machinelearningservices.models.ComputePowerAction - :ivar trigger_type: [Required] The schedule trigger type. Possible values include: - "Recurrence", "Cron". - :vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType - :ivar recurrence: Required if triggerType is Recurrence. - :vartype recurrence: ~azure.mgmt.machinelearningservices.models.Recurrence - :ivar cron: Required if triggerType is Cron. - :vartype cron: ~azure.mgmt.machinelearningservices.models.Cron - :ivar schedule: [Deprecated] Not used any more. - :vartype schedule: ~azure.mgmt.machinelearningservices.models.ScheduleBase - """ - - _validation = { - 'id': {'readonly': True}, - 'provisioning_status': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'action': {'key': 'action', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'recurrence': {'key': 'recurrence', 'type': 'Recurrence'}, - 'cron': {'key': 'cron', 'type': 'Cron'}, - 'schedule': {'key': 'schedule', 'type': 'ScheduleBase'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword status: Is the schedule enabled or disabled?. Possible values include: "Enabled", - "Disabled". - :paramtype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus - :keyword action: [Required] The compute power action. Possible values include: "Start", "Stop". - :paramtype action: str or ~azure.mgmt.machinelearningservices.models.ComputePowerAction - :keyword trigger_type: [Required] The schedule trigger type. Possible values include: - "Recurrence", "Cron". - :paramtype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType - :keyword recurrence: Required if triggerType is Recurrence. - :paramtype recurrence: ~azure.mgmt.machinelearningservices.models.Recurrence - :keyword cron: Required if triggerType is Cron. - :paramtype cron: ~azure.mgmt.machinelearningservices.models.Cron - :keyword schedule: [Deprecated] Not used any more. 
- :paramtype schedule: ~azure.mgmt.machinelearningservices.models.ScheduleBase - """ - super(ComputeStartStopSchedule, self).__init__(**kwargs) - self.id = None - self.provisioning_status = None - self.status = kwargs.get('status', None) - self.action = kwargs.get('action', None) - self.trigger_type = kwargs.get('trigger_type', None) - self.recurrence = kwargs.get('recurrence', None) - self.cron = kwargs.get('cron', None) - self.schedule = kwargs.get('schedule', None) - - -class ContainerResourceRequirements(msrest.serialization.Model): - """Resource requirements for each container instance within an online deployment. - - :ivar container_resource_limits: Container resource limit info:. - :vartype container_resource_limits: - ~azure.mgmt.machinelearningservices.models.ContainerResourceSettings - :ivar container_resource_requests: Container resource request info:. - :vartype container_resource_requests: - ~azure.mgmt.machinelearningservices.models.ContainerResourceSettings - """ - - _attribute_map = { - 'container_resource_limits': {'key': 'containerResourceLimits', 'type': 'ContainerResourceSettings'}, - 'container_resource_requests': {'key': 'containerResourceRequests', 'type': 'ContainerResourceSettings'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword container_resource_limits: Container resource limit info:. - :paramtype container_resource_limits: - ~azure.mgmt.machinelearningservices.models.ContainerResourceSettings - :keyword container_resource_requests: Container resource request info:. - :paramtype container_resource_requests: - ~azure.mgmt.machinelearningservices.models.ContainerResourceSettings - """ - super(ContainerResourceRequirements, self).__init__(**kwargs) - self.container_resource_limits = kwargs.get('container_resource_limits', None) - self.container_resource_requests = kwargs.get('container_resource_requests', None) - - -class ContainerResourceSettings(msrest.serialization.Model): - """ContainerResourceSettings. - - :ivar cpu: Number of vCPUs request/limit for container. More info: - https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. - :vartype cpu: str - :ivar gpu: Number of Nvidia GPU cards request/limit for container. More info: - https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. - :vartype gpu: str - :ivar memory: Memory size request/limit for container. More info: - https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. - :vartype memory: str - """ - - _attribute_map = { - 'cpu': {'key': 'cpu', 'type': 'str'}, - 'gpu': {'key': 'gpu', 'type': 'str'}, - 'memory': {'key': 'memory', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword cpu: Number of vCPUs request/limit for container. More info: - https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. - :paramtype cpu: str - :keyword gpu: Number of Nvidia GPU cards request/limit for container. More info: - https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. - :paramtype gpu: str - :keyword memory: Memory size request/limit for container. More info: - https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. 
- :paramtype memory: str - """ - super(ContainerResourceSettings, self).__init__(**kwargs) - self.cpu = kwargs.get('cpu', None) - self.gpu = kwargs.get('gpu', None) - self.memory = kwargs.get('memory', None) - - -class CosmosDbSettings(msrest.serialization.Model): - """CosmosDbSettings. - - :ivar collections_throughput: - :vartype collections_throughput: int - """ - - _attribute_map = { - 'collections_throughput': {'key': 'collectionsThroughput', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword collections_throughput: - :paramtype collections_throughput: int - """ - super(CosmosDbSettings, self).__init__(**kwargs) - self.collections_throughput = kwargs.get('collections_throughput', None) - - -class ScheduleActionBase(msrest.serialization.Model): - """ScheduleActionBase. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: JobScheduleAction, CreateMonitorAction, ImportDataAction, EndpointScheduleAction. - - All required parameters must be populated in order to send to Azure. - - :ivar action_type: Required. [Required] Specifies the action type of the schedule.Constant - filled by server. Possible values include: "CreateJob", "InvokeBatchEndpoint", "ImportData", - "CreateMonitor". - :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType - """ - - _validation = { - 'action_type': {'required': True}, - } - - _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, - } - - _subtype_map = { - 'action_type': {'CreateJob': 'JobScheduleAction', 'CreateMonitor': 'CreateMonitorAction', 'ImportData': 'ImportDataAction', 'InvokeBatchEndpoint': 'EndpointScheduleAction'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ScheduleActionBase, self).__init__(**kwargs) - self.action_type = None # type: Optional[str] - - -class CreateMonitorAction(ScheduleActionBase): - """CreateMonitorAction. - - All required parameters must be populated in order to send to Azure. - - :ivar action_type: Required. [Required] Specifies the action type of the schedule.Constant - filled by server. Possible values include: "CreateJob", "InvokeBatchEndpoint", "ImportData", - "CreateMonitor". - :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType - :ivar monitor_definition: Required. [Required] Defines the monitor. - :vartype monitor_definition: ~azure.mgmt.machinelearningservices.models.MonitorDefinition - """ - - _validation = { - 'action_type': {'required': True}, - 'monitor_definition': {'required': True}, - } - - _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, - 'monitor_definition': {'key': 'monitorDefinition', 'type': 'MonitorDefinition'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword monitor_definition: Required. [Required] Defines the monitor. - :paramtype monitor_definition: ~azure.mgmt.machinelearningservices.models.MonitorDefinition - """ - super(CreateMonitorAction, self).__init__(**kwargs) - self.action_type = 'CreateMonitor' # type: str - self.monitor_definition = kwargs['monitor_definition'] - - -class Cron(msrest.serialization.Model): - """The workflow trigger cron for ComputeStartStop schedule type. - - :ivar start_time: The start time in yyyy-MM-ddTHH:mm:ss format. - :vartype start_time: str - :ivar time_zone: Specifies time zone in which the schedule runs. - TimeZone should follow Windows time zone format. 
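For orientation, a minimal usage sketch of the container-resource models shown above. It assumes the version's models package re-exports these classes in the usual generated-package layout; the keyword names follow the _attribute_map entries in this hunk, and the cpu/memory values are placeholders.

from azure.ai.ml._restclient.v2023_08_01_preview import models

# Requests and limits mirror Kubernetes container resource semantics
# (see the kubernetes.io links in the docstrings above). Values are illustrative.
requirements = models.ContainerResourceRequirements(
    container_resource_requests=models.ContainerResourceSettings(cpu="1", memory="2Gi"),
    container_resource_limits=models.ContainerResourceSettings(cpu="2", gpu="1", memory="4Gi"),
)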
Refer: - https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. - :vartype time_zone: str - :ivar expression: [Required] Specifies cron expression of schedule. - The expression should follow NCronTab format. - :vartype expression: str - """ - - _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'expression': {'key': 'expression', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword start_time: The start time in yyyy-MM-ddTHH:mm:ss format. - :paramtype start_time: str - :keyword time_zone: Specifies time zone in which the schedule runs. - TimeZone should follow Windows time zone format. Refer: - https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. - :paramtype time_zone: str - :keyword expression: [Required] Specifies cron expression of schedule. - The expression should follow NCronTab format. - :paramtype expression: str - """ - super(Cron, self).__init__(**kwargs) - self.start_time = kwargs.get('start_time', None) - self.time_zone = kwargs.get('time_zone', "UTC") - self.expression = kwargs.get('expression', None) - - -class TriggerBase(msrest.serialization.Model): - """TriggerBase. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CronTrigger, RecurrenceTrigger. - - All required parameters must be populated in order to send to Azure. - - :ivar end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer - https://en.wikipedia.org/wiki/ISO_8601. - Recommented format would be "2022-06-01T00:00:01" - If not present, the schedule will run indefinitely. - :vartype end_time: str - :ivar start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC - offset. - :vartype start_time: str - :ivar time_zone: Specifies time zone in which the schedule runs. - TimeZone should follow Windows time zone format. Refer: - https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. - :vartype time_zone: str - :ivar trigger_type: Required. [Required].Constant filled by server. Possible values include: - "Recurrence", "Cron". - :vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType - """ - - _validation = { - 'trigger_type': {'required': True}, - } - - _attribute_map = { - 'end_time': {'key': 'endTime', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - } - - _subtype_map = { - 'trigger_type': {'Cron': 'CronTrigger', 'Recurrence': 'RecurrenceTrigger'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer - https://en.wikipedia.org/wiki/ISO_8601. - Recommented format would be "2022-06-01T00:00:01" - If not present, the schedule will run indefinitely. - :paramtype end_time: str - :keyword start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC - offset. - :paramtype start_time: str - :keyword time_zone: Specifies time zone in which the schedule runs. - TimeZone should follow Windows time zone format. Refer: - https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. 
- :paramtype time_zone: str - """ - super(TriggerBase, self).__init__(**kwargs) - self.end_time = kwargs.get('end_time', None) - self.start_time = kwargs.get('start_time', None) - self.time_zone = kwargs.get('time_zone', "UTC") - self.trigger_type = None # type: Optional[str] - - -class CronTrigger(TriggerBase): - """CronTrigger. - - All required parameters must be populated in order to send to Azure. - - :ivar end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer - https://en.wikipedia.org/wiki/ISO_8601. - Recommented format would be "2022-06-01T00:00:01" - If not present, the schedule will run indefinitely. - :vartype end_time: str - :ivar start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC - offset. - :vartype start_time: str - :ivar time_zone: Specifies time zone in which the schedule runs. - TimeZone should follow Windows time zone format. Refer: - https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. - :vartype time_zone: str - :ivar trigger_type: Required. [Required].Constant filled by server. Possible values include: - "Recurrence", "Cron". - :vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType - :ivar expression: Required. [Required] Specifies cron expression of schedule. - The expression should follow NCronTab format. - :vartype expression: str - """ - - _validation = { - 'trigger_type': {'required': True}, - 'expression': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'end_time': {'key': 'endTime', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'expression': {'key': 'expression', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer - https://en.wikipedia.org/wiki/ISO_8601. - Recommented format would be "2022-06-01T00:00:01" - If not present, the schedule will run indefinitely. - :paramtype end_time: str - :keyword start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC - offset. - :paramtype start_time: str - :keyword time_zone: Specifies time zone in which the schedule runs. - TimeZone should follow Windows time zone format. Refer: - https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. - :paramtype time_zone: str - :keyword expression: Required. [Required] Specifies cron expression of schedule. - The expression should follow NCronTab format. - :paramtype expression: str - """ - super(CronTrigger, self).__init__(**kwargs) - self.trigger_type = 'Cron' # type: str - self.expression = kwargs['expression'] - - -class CsvExportSummary(ExportSummary): - """CsvExportSummary. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar end_date_time: The time when the export was completed. - :vartype end_date_time: ~datetime.datetime - :ivar exported_row_count: The total number of labeled datapoints exported. - :vartype exported_row_count: long - :ivar format: Required. [Required] The format of exported labels, also as the - discriminator.Constant filled by server. Possible values include: "Dataset", "Coco", "CSV". 
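As a usage illustration for the Cron/CronTrigger models above (a sketch only: keyword names come from the __init__ docstrings in this hunk, the import path is the version folder this patch touches, and the cron expression and start time are placeholder values):

from azure.ai.ml._restclient.v2023_08_01_preview import models

# NCronTab expression: 09:00 every weekday; time_zone defaults to "UTC" when omitted.
trigger = models.CronTrigger(
    expression="0 9 * * 1-5",
    start_time="2023-09-01T00:00:00",
    time_zone="UTC",
)
assert trigger.trigger_type == "Cron"  # discriminator is filled by the constructor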
- :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType - :ivar labeling_job_id: Name and identifier of the job containing exported labels. - :vartype labeling_job_id: str - :ivar start_date_time: The time when the export was requested. - :vartype start_date_time: ~datetime.datetime - :ivar container_name: The container name to which the labels will be exported. - :vartype container_name: str - :ivar snapshot_path: The output path where the labels will be exported. - :vartype snapshot_path: str - """ - - _validation = { - 'end_date_time': {'readonly': True}, - 'exported_row_count': {'readonly': True}, - 'format': {'required': True}, - 'labeling_job_id': {'readonly': True}, - 'start_date_time': {'readonly': True}, - 'container_name': {'readonly': True}, - 'snapshot_path': {'readonly': True}, - } - - _attribute_map = { - 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, - 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, - 'format': {'key': 'format', 'type': 'str'}, - 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, - 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, - 'container_name': {'key': 'containerName', 'type': 'str'}, - 'snapshot_path': {'key': 'snapshotPath', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(CsvExportSummary, self).__init__(**kwargs) - self.format = 'CSV' # type: str - self.container_name = None - self.snapshot_path = None - - -class CustomForecastHorizon(ForecastHorizon): - """The desired maximum forecast horizon in units of time-series frequency. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] Set forecast horizon value selection mode.Constant filled by - server. Possible values include: "Auto", "Custom". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.ForecastHorizonMode - :ivar value: Required. [Required] Forecast horizon value. - :vartype value: int - """ - - _validation = { - 'mode': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword value: Required. [Required] Forecast horizon value. - :paramtype value: int - """ - super(CustomForecastHorizon, self).__init__(**kwargs) - self.mode = 'Custom' # type: str - self.value = kwargs['value'] - - -class CustomInferencingServer(InferencingServer): - """Custom inference server configurations. - - All required parameters must be populated in order to send to Azure. - - :ivar server_type: Required. [Required] Inferencing server type for various targets.Constant - filled by server. Possible values include: "AzureMLOnline", "AzureMLBatch", "Triton", "Custom". - :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType - :ivar inference_configuration: Inference configuration for custom inferencing. - :vartype inference_configuration: - ~azure.mgmt.machinelearningservices.models.OnlineInferenceConfiguration - """ - - _validation = { - 'server_type': {'required': True}, - } - - _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, - 'inference_configuration': {'key': 'inferenceConfiguration', 'type': 'OnlineInferenceConfiguration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword inference_configuration: Inference configuration for custom inferencing. 
- :paramtype inference_configuration: - ~azure.mgmt.machinelearningservices.models.OnlineInferenceConfiguration - """ - super(CustomInferencingServer, self).__init__(**kwargs) - self.server_type = 'Custom' # type: str - self.inference_configuration = kwargs.get('inference_configuration', None) - - -class CustomKeys(msrest.serialization.Model): - """Custom Keys credential object. - - :ivar keys: Dictionary of :code:``. - :vartype keys: dict[str, str] - """ - - _attribute_map = { - 'keys': {'key': 'keys', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword keys: Dictionary of :code:``. - :paramtype keys: dict[str, str] - """ - super(CustomKeys, self).__init__(**kwargs) - self.keys = kwargs.get('keys', None) - - -class CustomKeysWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): - """Category:= CustomKeys -AuthType:= CustomKeys (as type discriminator) -Credentials:= {CustomKeys} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.CustomKeys -Target:= {any value} -Use Metadata property bag for ApiVersion and other metadata fields. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar expiry_time: - :vartype expiry_time: ~datetime.datetime - :ivar metadata: Any object. - :vartype metadata: any - :ivar target: - :vartype target: str - :ivar credentials: Custom Keys credential object. - :vartype credentials: ~azure.mgmt.machinelearningservices.models.CustomKeys - """ - - _validation = { - 'auth_type': {'required': True}, - } - - _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'CustomKeys'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :keyword expiry_time: - :paramtype expiry_time: ~datetime.datetime - :keyword metadata: Any object. - :paramtype metadata: any - :keyword target: - :paramtype target: str - :keyword credentials: Custom Keys credential object. 
- :paramtype credentials: ~azure.mgmt.machinelearningservices.models.CustomKeys - """ - super(CustomKeysWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'CustomKeys' # type: str - self.credentials = kwargs.get('credentials', None) - - -class CustomMetricThreshold(msrest.serialization.Model): - """CustomMetricThreshold. - - All required parameters must be populated in order to send to Azure. - - :ivar metric: Required. [Required] The user-defined metric to calculate. - :vartype metric: str - :ivar threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - - _validation = { - 'metric': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'metric': {'key': 'metric', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword metric: Required. [Required] The user-defined metric to calculate. - :paramtype metric: str - :keyword threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - super(CustomMetricThreshold, self).__init__(**kwargs) - self.metric = kwargs['metric'] - self.threshold = kwargs.get('threshold', None) - - -class JobInput(msrest.serialization.Model): - """Command job definition. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CustomModelJobInput, LiteralJobInput, MLFlowModelJobInput, MLTableJobInput, TritonModelJobInput, UriFileJobInput, UriFolderJobInput. - - All required parameters must be populated in order to send to Azure. - - :ivar description: Description for the input. - :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - """ - - _validation = { - 'job_input_type': {'required': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - } - - _subtype_map = { - 'job_input_type': {'custom_model': 'CustomModelJobInput', 'literal': 'LiteralJobInput', 'mlflow_model': 'MLFlowModelJobInput', 'mltable': 'MLTableJobInput', 'triton_model': 'TritonModelJobInput', 'uri_file': 'UriFileJobInput', 'uri_folder': 'UriFolderJobInput'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: Description for the input. - :paramtype description: str - """ - super(JobInput, self).__init__(**kwargs) - self.description = kwargs.get('description', None) - self.job_input_type = None # type: Optional[str] - - -class CustomModelJobInput(JobInput, AssetJobInput): - """CustomModelJobInput. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str - :ivar description: Description for the input. 
- :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - """ - - _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str - :keyword description: Description for the input. - :paramtype description: str - """ - super(CustomModelJobInput, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.uri = kwargs['uri'] - self.job_input_type = 'custom_model' # type: str - self.description = kwargs.get('description', None) - - -class JobOutput(msrest.serialization.Model): - """Job output definition container information on where to find job output/logs. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CustomModelJobOutput, MLFlowModelJobOutput, MLTableJobOutput, TritonModelJobOutput, UriFileJobOutput, UriFolderJobOutput. - - All required parameters must be populated in order to send to Azure. - - :ivar description: Description for the output. - :vartype description: str - :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by - server. Possible values include: "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType - """ - - _validation = { - 'job_output_type': {'required': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, - } - - _subtype_map = { - 'job_output_type': {'custom_model': 'CustomModelJobOutput', 'mlflow_model': 'MLFlowModelJobOutput', 'mltable': 'MLTableJobOutput', 'triton_model': 'TritonModelJobOutput', 'uri_file': 'UriFileJobOutput', 'uri_folder': 'UriFolderJobOutput'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: Description for the output. - :paramtype description: str - """ - super(JobOutput, self).__init__(**kwargs) - self.description = kwargs.get('description', None) - self.job_output_type = None # type: Optional[str] - - -class CustomModelJobOutput(JobOutput, AssetJobOutput): - """CustomModelJobOutput. - - All required parameters must be populated in order to send to Azure. - - :ivar asset_name: Output Asset Name. - :vartype asset_name: str - :ivar asset_version: Output Asset Version. - :vartype asset_version: str - :ivar auto_delete_setting: Auto delete setting of output data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", - "Direct". 
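The JobInput/JobOutput hierarchies above are polymorphic on the jobInputType/jobOutputType discriminators (see the _subtype_map dictionaries). A hedged sketch of constructing one concrete input subtype from this hunk, with a placeholder asset URI:

from azure.ai.ml._restclient.v2023_08_01_preview import models

model_input = models.CustomModelJobInput(
    uri="azureml://.../models/my-model/versions/1",  # placeholder URI; required by validation
    mode="ReadOnlyMount",
    description="Custom model consumed by the job",
)
assert model_input.job_input_type == "custom_model"  # set by the constructor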
- :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :ivar uri: Output Asset URI. - :vartype uri: str - :ivar description: Description for the output. - :vartype description: str - :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by - server. Possible values include: "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType - """ - - _validation = { - 'job_output_type': {'required': True}, - } - - _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword asset_name: Output Asset Name. - :paramtype asset_name: str - :keyword asset_version: Output Asset Version. - :paramtype asset_version: str - :keyword auto_delete_setting: Auto delete setting of output data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", - "Direct". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :keyword uri: Output Asset URI. - :paramtype uri: str - :keyword description: Description for the output. - :paramtype description: str - """ - super(CustomModelJobOutput, self).__init__(**kwargs) - self.asset_name = kwargs.get('asset_name', None) - self.asset_version = kwargs.get('asset_version', None) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.mode = kwargs.get('mode', None) - self.uri = kwargs.get('uri', None) - self.job_output_type = 'custom_model' # type: str - self.description = kwargs.get('description', None) - - -class MonitoringSignalBase(msrest.serialization.Model): - """MonitoringSignalBase. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CustomMonitoringSignal, DataDriftMonitoringSignal, DataQualityMonitoringSignal, FeatureAttributionDriftMonitoringSignal, GenerationSafetyQualityMonitoringSignal, GenerationTokenStatisticsSignal, ModelPerformanceSignal, PredictionDriftMonitoringSignal. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", - "Enabled". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". 
- :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType - """ - - _validation = { - 'signal_type': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - } - - _subtype_map = { - 'signal_type': {'Custom': 'CustomMonitoringSignal', 'DataDrift': 'DataDriftMonitoringSignal', 'DataQuality': 'DataQualityMonitoringSignal', 'FeatureAttributionDrift': 'FeatureAttributionDriftMonitoringSignal', 'GenerationSafetyQuality': 'GenerationSafetyQualityMonitoringSignal', 'GenerationTokenStatistics': 'GenerationTokenStatisticsSignal', 'ModelPerformance': 'ModelPerformanceSignal', 'PredictionDrift': 'PredictionDriftMonitoringSignal'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - """ - super(MonitoringSignalBase, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.properties = kwargs.get('properties', None) - self.signal_type = None # type: Optional[str] - - -class CustomMonitoringSignal(MonitoringSignalBase): - """CustomMonitoringSignal. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", - "Enabled". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". - :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType - :ivar component_id: Required. [Required] ARM resource ID of the component resource used to - calculate the custom metrics. - :vartype component_id: str - :ivar input_assets: Monitoring assets to take as input. Key is the component input port name, - value is the data asset. - :vartype input_assets: dict[str, - ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] - :ivar inputs: Extra component parameters to take as input. Key is the component literal input - port name, value is the parameter value. - :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] - :ivar metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. - :vartype metric_thresholds: - list[~azure.mgmt.machinelearningservices.models.CustomMetricThreshold] - :ivar workspace_connection: Required. [Required] A list of metrics to calculate and their - associated thresholds. 
- :vartype workspace_connection: - ~azure.mgmt.machinelearningservices.models.MonitoringWorkspaceConnection - """ - - _validation = { - 'signal_type': {'required': True}, - 'component_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'metric_thresholds': {'required': True}, - 'workspace_connection': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'input_assets': {'key': 'inputAssets', 'type': '{MonitoringInputDataBase}'}, - 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[CustomMetricThreshold]'}, - 'workspace_connection': {'key': 'workspaceConnection', 'type': 'MonitoringWorkspaceConnection'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword component_id: Required. [Required] ARM resource ID of the component resource used to - calculate the custom metrics. - :paramtype component_id: str - :keyword input_assets: Monitoring assets to take as input. Key is the component input port - name, value is the data asset. - :paramtype input_assets: dict[str, - ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] - :keyword inputs: Extra component parameters to take as input. Key is the component literal - input port name, value is the parameter value. - :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] - :keyword metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. - :paramtype metric_thresholds: - list[~azure.mgmt.machinelearningservices.models.CustomMetricThreshold] - :keyword workspace_connection: Required. [Required] A list of metrics to calculate and their - associated thresholds. - :paramtype workspace_connection: - ~azure.mgmt.machinelearningservices.models.MonitoringWorkspaceConnection - """ - super(CustomMonitoringSignal, self).__init__(**kwargs) - self.signal_type = 'Custom' # type: str - self.component_id = kwargs['component_id'] - self.input_assets = kwargs.get('input_assets', None) - self.inputs = kwargs.get('inputs', None) - self.metric_thresholds = kwargs['metric_thresholds'] - self.workspace_connection = kwargs['workspace_connection'] - - -class CustomNCrossValidations(NCrossValidations): - """N-Cross validations are specified by user. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] Mode for determining N-Cross validations.Constant filled by - server. Possible values include: "Auto", "Custom". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.NCrossValidationsMode - :ivar value: Required. [Required] N-Cross validations value. - :vartype value: int - """ - - _validation = { - 'mode': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword value: Required. [Required] N-Cross validations value. 
- :paramtype value: int - """ - super(CustomNCrossValidations, self).__init__(**kwargs) - self.mode = 'Custom' # type: str - self.value = kwargs['value'] - - -class CustomSeasonality(Seasonality): - """CustomSeasonality. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] Seasonality mode.Constant filled by server. Possible values - include: "Auto", "Custom". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.SeasonalityMode - :ivar value: Required. [Required] Seasonality value. - :vartype value: int - """ - - _validation = { - 'mode': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword value: Required. [Required] Seasonality value. - :paramtype value: int - """ - super(CustomSeasonality, self).__init__(**kwargs) - self.mode = 'Custom' # type: str - self.value = kwargs['value'] - - -class CustomService(msrest.serialization.Model): - """Specifies the custom service configuration. - - :ivar additional_properties: Unmatched properties from the message are deserialized to this - collection. - :vartype additional_properties: dict[str, any] - :ivar name: Name of the Custom Service. - :vartype name: str - :ivar image: Describes the Image Specifications. - :vartype image: ~azure.mgmt.machinelearningservices.models.Image - :ivar environment_variables: Environment Variable for the container. - :vartype environment_variables: dict[str, - ~azure.mgmt.machinelearningservices.models.EnvironmentVariable] - :ivar docker: Describes the docker settings for the image. - :vartype docker: ~azure.mgmt.machinelearningservices.models.Docker - :ivar endpoints: Configuring the endpoints for the container. - :vartype endpoints: list[~azure.mgmt.machinelearningservices.models.Endpoint] - :ivar volumes: Configuring the volumes for the container. - :vartype volumes: list[~azure.mgmt.machinelearningservices.models.VolumeDefinition] - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'image': {'key': 'image', 'type': 'Image'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{EnvironmentVariable}'}, - 'docker': {'key': 'docker', 'type': 'Docker'}, - 'endpoints': {'key': 'endpoints', 'type': '[Endpoint]'}, - 'volumes': {'key': 'volumes', 'type': '[VolumeDefinition]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword additional_properties: Unmatched properties from the message are deserialized to this - collection. - :paramtype additional_properties: dict[str, any] - :keyword name: Name of the Custom Service. - :paramtype name: str - :keyword image: Describes the Image Specifications. - :paramtype image: ~azure.mgmt.machinelearningservices.models.Image - :keyword environment_variables: Environment Variable for the container. - :paramtype environment_variables: dict[str, - ~azure.mgmt.machinelearningservices.models.EnvironmentVariable] - :keyword docker: Describes the docker settings for the image. - :paramtype docker: ~azure.mgmt.machinelearningservices.models.Docker - :keyword endpoints: Configuring the endpoints for the container. - :paramtype endpoints: list[~azure.mgmt.machinelearningservices.models.Endpoint] - :keyword volumes: Configuring the volumes for the container. 
- :paramtype volumes: list[~azure.mgmt.machinelearningservices.models.VolumeDefinition] - """ - super(CustomService, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.name = kwargs.get('name', None) - self.image = kwargs.get('image', None) - self.environment_variables = kwargs.get('environment_variables', None) - self.docker = kwargs.get('docker', None) - self.endpoints = kwargs.get('endpoints', None) - self.volumes = kwargs.get('volumes', None) - - -class CustomTargetLags(TargetLags): - """CustomTargetLags. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] Set target lags mode - Auto/Custom.Constant filled by server. - Possible values include: "Auto", "Custom". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetLagsMode - :ivar values: Required. [Required] Set target lags values. - :vartype values: list[int] - """ - - _validation = { - 'mode': {'required': True}, - 'values': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[int]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword values: Required. [Required] Set target lags values. - :paramtype values: list[int] - """ - super(CustomTargetLags, self).__init__(**kwargs) - self.mode = 'Custom' # type: str - self.values = kwargs['values'] - - -class CustomTargetRollingWindowSize(TargetRollingWindowSize): - """CustomTargetRollingWindowSize. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Required. [Required] TargetRollingWindowSiz detection mode.Constant filled by - server. Possible values include: "Auto", "Custom". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSizeMode - :ivar value: Required. [Required] TargetRollingWindowSize value. - :vartype value: int - """ - - _validation = { - 'mode': {'required': True}, - 'value': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword value: Required. [Required] TargetRollingWindowSize value. - :paramtype value: int - """ - super(CustomTargetRollingWindowSize, self).__init__(**kwargs) - self.mode = 'Custom' # type: str - self.value = kwargs['value'] - - -class DataImportSource(msrest.serialization.Model): - """DataImportSource. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatabaseSource, FileSystemSource. - - All required parameters must be populated in order to send to Azure. - - :ivar connection: Workspace connection for data import source storage. - :vartype connection: str - :ivar source_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "database", "file_system". - :vartype source_type: str or ~azure.mgmt.machinelearningservices.models.DataImportSourceType - """ - - _validation = { - 'source_type': {'required': True}, - } - - _attribute_map = { - 'connection': {'key': 'connection', 'type': 'str'}, - 'source_type': {'key': 'sourceType', 'type': 'str'}, - } - - _subtype_map = { - 'source_type': {'database': 'DatabaseSource', 'file_system': 'FileSystemSource'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword connection: Workspace connection for data import source storage. 
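# --- Illustrative usage sketch (editorial note, not part of the patch) ---
# The Custom* forecasting settings removed above are discriminated models: their
# kwargs-based __init__ pins the `mode` discriminator to 'Custom' and requires the
# value(s). A minimal sketch assuming these classes are importable from the
# version-specific models namespace of this SDK (module path is an assumption):
from azure.ai.ml._restclient.v2023_08_01_preview import models

n_cross = models.CustomNCrossValidations(value=5)            # mode forced to 'Custom'
seasonality = models.CustomSeasonality(value=7)
target_lags = models.CustomTargetLags(values=[1, 2, 3])
rolling_window = models.CustomTargetRollingWindowSize(value=10)
assert n_cross.mode == "Custom" and target_lags.values == [1, 2, 3]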
- :paramtype connection: str - """ - super(DataImportSource, self).__init__(**kwargs) - self.connection = kwargs.get('connection', None) - self.source_type = None # type: Optional[str] - - -class DatabaseSource(DataImportSource): - """DatabaseSource. - - All required parameters must be populated in order to send to Azure. - - :ivar connection: Workspace connection for data import source storage. - :vartype connection: str - :ivar source_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "database", "file_system". - :vartype source_type: str or ~azure.mgmt.machinelearningservices.models.DataImportSourceType - :ivar query: SQL Query statement for data import Database source. - :vartype query: str - :ivar stored_procedure: SQL StoredProcedure on data import Database source. - :vartype stored_procedure: str - :ivar stored_procedure_params: SQL StoredProcedure parameters. - :vartype stored_procedure_params: list[dict[str, str]] - :ivar table_name: Name of the table on data import Database source. - :vartype table_name: str - """ - - _validation = { - 'source_type': {'required': True}, - } - - _attribute_map = { - 'connection': {'key': 'connection', 'type': 'str'}, - 'source_type': {'key': 'sourceType', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'str'}, - 'stored_procedure': {'key': 'storedProcedure', 'type': 'str'}, - 'stored_procedure_params': {'key': 'storedProcedureParams', 'type': '[{str}]'}, - 'table_name': {'key': 'tableName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword connection: Workspace connection for data import source storage. - :paramtype connection: str - :keyword query: SQL Query statement for data import Database source. - :paramtype query: str - :keyword stored_procedure: SQL StoredProcedure on data import Database source. - :paramtype stored_procedure: str - :keyword stored_procedure_params: SQL StoredProcedure parameters. - :paramtype stored_procedure_params: list[dict[str, str]] - :keyword table_name: Name of the table on data import Database source. - :paramtype table_name: str - """ - super(DatabaseSource, self).__init__(**kwargs) - self.source_type = 'database' # type: str - self.query = kwargs.get('query', None) - self.stored_procedure = kwargs.get('stored_procedure', None) - self.stored_procedure_params = kwargs.get('stored_procedure_params', None) - self.table_name = kwargs.get('table_name', None) - - -class DatabricksSchema(msrest.serialization.Model): - """DatabricksSchema. - - :ivar properties: Properties of Databricks. - :vartype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties - """ - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Properties of Databricks. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties - """ - super(DatabricksSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class Databricks(Compute, DatabricksSchema): - """A DataFactory compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar properties: Properties of Databricks. - :vartype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. 
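# --- Illustrative usage sketch (editorial note, not part of the patch) ---
# DataImportSource is a polymorphic base: its _subtype_map routes the `sourceType`
# discriminator to DatabaseSource / FileSystemSource, so msrest deserializes the
# concrete subclass automatically; constructing a subclass directly pins the
# discriminator. Import path and values below are assumptions:
from azure.ai.ml._restclient.v2023_08_01_preview import models

db_source = models.DatabaseSource(
    connection="azureml:my_sql_connection",   # hypothetical workspace connection name
    query="SELECT * FROM dbo.sales",
    table_name="sales",
)
assert db_source.source_type == "database"    # set by the constructor, not by the caller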
Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar compute_location: Location for the underlying compute. - :vartype compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar description: The description of the Machine Learning compute. - :vartype description: str - :ivar created_on: The time at which the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The time at which the compute was last modified. - :vartype modified_on: ~datetime.datetime - :ivar resource_id: ARM resource id of the underlying compute. - :vartype resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI - and AAD exclusively for authentication. - :vartype disable_local_auth: bool - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Properties of Databricks. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties - :keyword compute_location: Location for the underlying compute. - :paramtype compute_location: str - :keyword description: The description of the Machine Learning compute. - :paramtype description: str - :keyword resource_id: ARM resource id of the underlying compute. - :paramtype resource_id: str - :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only - MSI and AAD exclusively for authentication. 
- :paramtype disable_local_auth: bool - """ - super(Databricks, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'Databricks' # type: str - self.compute_location = kwargs.get('compute_location', None) - self.provisioning_state = None - self.description = kwargs.get('description', None) - self.created_on = None - self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) - self.provisioning_errors = None - self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) - - -class DatabricksComputeSecretsProperties(msrest.serialization.Model): - """Properties of Databricks Compute Secrets. - - :ivar databricks_access_token: access token for databricks account. - :vartype databricks_access_token: str - """ - - _attribute_map = { - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword databricks_access_token: access token for databricks account. - :paramtype databricks_access_token: str - """ - super(DatabricksComputeSecretsProperties, self).__init__(**kwargs) - self.databricks_access_token = kwargs.get('databricks_access_token', None) - - -class DatabricksComputeSecrets(ComputeSecrets, DatabricksComputeSecretsProperties): - """Secrets related to a Machine Learning compute based on Databricks. - - All required parameters must be populated in order to send to Azure. - - :ivar databricks_access_token: access token for databricks account. - :vartype databricks_access_token: str - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - """ - - _validation = { - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword databricks_access_token: access token for databricks account. - :paramtype databricks_access_token: str - """ - super(DatabricksComputeSecrets, self).__init__(**kwargs) - self.databricks_access_token = kwargs.get('databricks_access_token', None) - self.compute_type = 'Databricks' # type: str - - -class DatabricksProperties(msrest.serialization.Model): - """Properties of Databricks. - - :ivar databricks_access_token: Databricks access token. - :vartype databricks_access_token: str - :ivar workspace_url: Workspace Url. - :vartype workspace_url: str - """ - - _attribute_map = { - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, - 'workspace_url': {'key': 'workspaceUrl', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword databricks_access_token: Databricks access token. - :paramtype databricks_access_token: str - :keyword workspace_url: Workspace Url. - :paramtype workspace_url: str - """ - super(DatabricksProperties, self).__init__(**kwargs) - self.databricks_access_token = kwargs.get('databricks_access_token', None) - self.workspace_url = kwargs.get('workspace_url', None) - - -class DataCollector(msrest.serialization.Model): - """DataCollector. - - All required parameters must be populated in order to send to Azure. - - :ivar collections: Required. 
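# --- Illustrative usage sketch (editorial note, not part of the patch) ---
# Databricks multiple-inherits Compute and DatabricksSchema; only the writable fields
# are accepted by __init__, while server-populated fields (provisioning_state,
# created_on, modified_on, provisioning_errors, is_attached_compute) stay None until
# the service returns them. Import path and values below are assumptions:
from azure.ai.ml._restclient.v2023_08_01_preview import models

databricks = models.Databricks(
    description="Attached Databricks workspace",
    resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.Databricks/workspaces/<name>",  # placeholder ARM ID
    properties=models.DatabricksProperties(
        databricks_access_token="<token>",                       # placeholder secret
        workspace_url="https://adb-000.azuredatabricks.net",     # placeholder URL
    ),
)
assert databricks.compute_type == "Databricks" and databricks.provisioning_state is None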
[Required] The collection configuration. Each collection has it - own configuration to collect model data and the name of collection can be arbitrary string. - Model data collector can be used for either payload logging or custom logging or both of them. - Collection request and response are reserved for payload logging, others are for custom - logging. - :vartype collections: dict[str, ~azure.mgmt.machinelearningservices.models.Collection] - :ivar request_logging: The request logging configuration for mdc, it includes advanced logging - settings for all collections. It's optional. - :vartype request_logging: ~azure.mgmt.machinelearningservices.models.RequestLogging - :ivar rolling_rate: When model data is collected to blob storage, we need to roll the data to - different path to avoid logging all of them in a single blob file. - If the rolling rate is hour, all data will be collected in the blob path /yyyy/MM/dd/HH/. - If it's day, all data will be collected in blob path /yyyy/MM/dd/. - The other benefit of rolling path is that model monitoring ui is able to select a time range - of data very quickly. Possible values include: "Year", "Month", "Day", "Hour", "Minute". - :vartype rolling_rate: str or ~azure.mgmt.machinelearningservices.models.RollingRateType - """ - - _validation = { - 'collections': {'required': True}, - } - - _attribute_map = { - 'collections': {'key': 'collections', 'type': '{Collection}'}, - 'request_logging': {'key': 'requestLogging', 'type': 'RequestLogging'}, - 'rolling_rate': {'key': 'rollingRate', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword collections: Required. [Required] The collection configuration. Each collection has it - own configuration to collect model data and the name of collection can be arbitrary string. - Model data collector can be used for either payload logging or custom logging or both of them. - Collection request and response are reserved for payload logging, others are for custom - logging. - :paramtype collections: dict[str, ~azure.mgmt.machinelearningservices.models.Collection] - :keyword request_logging: The request logging configuration for mdc, it includes advanced - logging settings for all collections. It's optional. - :paramtype request_logging: ~azure.mgmt.machinelearningservices.models.RequestLogging - :keyword rolling_rate: When model data is collected to blob storage, we need to roll the data - to different path to avoid logging all of them in a single blob file. - If the rolling rate is hour, all data will be collected in the blob path /yyyy/MM/dd/HH/. - If it's day, all data will be collected in blob path /yyyy/MM/dd/. - The other benefit of rolling path is that model monitoring ui is able to select a time range - of data very quickly. Possible values include: "Year", "Month", "Day", "Hour", "Minute". - :paramtype rolling_rate: str or ~azure.mgmt.machinelearningservices.models.RollingRateType - """ - super(DataCollector, self).__init__(**kwargs) - self.collections = kwargs['collections'] - self.request_logging = kwargs.get('request_logging', None) - self.rolling_rate = kwargs.get('rolling_rate', None) - - -class DataContainer(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. 
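# --- Illustrative sketch of the rollingRate semantics described above (editorial note) ---
# Per the DataCollector docstring, collected model data is rolled into time-bucketed
# blob paths: /yyyy/MM/dd/HH/ for an hourly rate, /yyyy/MM/dd/ for a daily rate. A
# stdlib-only sketch of that path layout (the helper below is hypothetical):
from datetime import datetime, timezone

def rolling_blob_prefix(rolling_rate: str, now: datetime) -> str:
    """Return the blob path prefix produced for a given rolling rate ('Hour' or 'Day')."""
    if rolling_rate == "Hour":
        return now.strftime("/%Y/%m/%d/%H/")
    if rolling_rate == "Day":
        return now.strftime("/%Y/%m/%d/")
    raise ValueError(f"unsupported rolling rate: {rolling_rate}")

print(rolling_blob_prefix("Hour", datetime(2023, 8, 1, 14, tzinfo=timezone.utc)))  # /2023/08/01/14/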
Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.DataContainerProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'DataContainerProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.DataContainerProperties - """ - super(DataContainer, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class DataContainerProperties(AssetContainer): - """Container for data asset versions. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar latest_version: The latest version inside this container. - :vartype latest_version: str - :ivar next_version: The next auto incremental version. - :vartype next_version: str - :ivar data_type: Required. [Required] Specifies the type of data. Possible values include: - "uri_file", "uri_folder", "mltable". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType - """ - - _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'data_type': {'required': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - :keyword data_type: Required. 
[Required] Specifies the type of data. Possible values include: - "uri_file", "uri_folder", "mltable". - :paramtype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType - """ - super(DataContainerProperties, self).__init__(**kwargs) - self.data_type = kwargs['data_type'] - - -class DataContainerResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of DataContainer entities. - - :ivar next_link: The link to the next page of DataContainer objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type DataContainer. - :vartype value: list[~azure.mgmt.machinelearningservices.models.DataContainer] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[DataContainer]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of DataContainer objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type DataContainer. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.DataContainer] - """ - super(DataContainerResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class DataDriftMonitoringSignal(MonitoringSignalBase): - """DataDriftMonitoringSignal. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", - "Enabled". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". - :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType - :ivar data_segment: The data segment used for scoping on a subset of the data population. - :vartype data_segment: ~azure.mgmt.machinelearningservices.models.MonitoringDataSegment - :ivar feature_data_type_override: A dictionary that maps feature names to their respective data - types. - :vartype feature_data_type_override: dict[str, str or - ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType] - :ivar features: The feature filter which identifies which feature to calculate drift over. - :vartype features: ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterBase - :ivar metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. - :vartype metric_thresholds: - list[~azure.mgmt.machinelearningservices.models.DataDriftMetricThresholdBase] - :ivar production_data: Required. [Required] The data which drift will be calculated for. - :vartype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :ivar reference_data: Required. [Required] The data to calculate drift against. 
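# --- Illustrative usage sketch (editorial note, not part of the patch) ---
# DataContainer is an ARM envelope whose required `properties` payload is a
# DataContainerProperties, which in turn requires `data_type`. A minimal sketch;
# import path is an assumption:
from azure.ai.ml._restclient.v2023_08_01_preview import models

container = models.DataContainer(
    properties=models.DataContainerProperties(
        data_type="uri_file",
        description="Daily sales extracts",
        tags={"team": "analytics"},
    )
)
# latest_version / next_version are read-only and are populated by the service.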
- :vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - """ - - _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'data_segment': {'key': 'dataSegment', 'type': 'MonitoringDataSegment'}, - 'feature_data_type_override': {'key': 'featureDataTypeOverride', 'type': '{str}'}, - 'features': {'key': 'features', 'type': 'MonitoringFeatureFilterBase'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[DataDriftMetricThresholdBase]'}, - 'production_data': {'key': 'productionData', 'type': 'MonitoringInputDataBase'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword data_segment: The data segment used for scoping on a subset of the data population. - :paramtype data_segment: ~azure.mgmt.machinelearningservices.models.MonitoringDataSegment - :keyword feature_data_type_override: A dictionary that maps feature names to their respective - data types. - :paramtype feature_data_type_override: dict[str, str or - ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType] - :keyword features: The feature filter which identifies which feature to calculate drift over. - :paramtype features: ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterBase - :keyword metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. - :paramtype metric_thresholds: - list[~azure.mgmt.machinelearningservices.models.DataDriftMetricThresholdBase] - :keyword production_data: Required. [Required] The data which drift will be calculated for. - :paramtype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :keyword reference_data: Required. [Required] The data to calculate drift against. - :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - """ - super(DataDriftMonitoringSignal, self).__init__(**kwargs) - self.signal_type = 'DataDrift' # type: str - self.data_segment = kwargs.get('data_segment', None) - self.feature_data_type_override = kwargs.get('feature_data_type_override', None) - self.features = kwargs.get('features', None) - self.metric_thresholds = kwargs['metric_thresholds'] - self.production_data = kwargs['production_data'] - self.reference_data = kwargs['reference_data'] - - -class DataFactory(Compute): - """A DataFactory compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". 
- :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar compute_location: Location for the underlying compute. - :vartype compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar description: The description of the Machine Learning compute. - :vartype description: str - :ivar created_on: The time at which the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The time at which the compute was last modified. - :vartype modified_on: ~datetime.datetime - :ivar resource_id: ARM resource id of the underlying compute. - :vartype resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI - and AAD exclusively for authentication. - :vartype disable_local_auth: bool - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword compute_location: Location for the underlying compute. - :paramtype compute_location: str - :keyword description: The description of the Machine Learning compute. - :paramtype description: str - :keyword resource_id: ARM resource id of the underlying compute. - :paramtype resource_id: str - :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only - MSI and AAD exclusively for authentication. - :paramtype disable_local_auth: bool - """ - super(DataFactory, self).__init__(**kwargs) - self.compute_type = 'DataFactory' # type: str - - -class DataVersionBaseProperties(AssetBase): - """Data version base definition. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MLTableData, UriFileDataVersion, UriFolderDataVersion. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. 
- :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :vartype is_archived: bool - :ivar data_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "uri_file", "uri_folder", "mltable". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType - :ivar data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. - :vartype data_uri: str - :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual - Property. - :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar stage: Stage in the data lifecycle assigned to this data asset. - :vartype stage: str - """ - - _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 'stage', 'type': 'str'}, - } - - _subtype_map = { - 'data_type': {'mltable': 'MLTableData', 'uri_file': 'UriFileDataVersion', 'uri_folder': 'UriFolderDataVersion'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :paramtype is_archived: bool - :keyword data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. - :paramtype data_uri: str - :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual - Property. 
- :paramtype intellectual_property: - ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword stage: Stage in the data lifecycle assigned to this data asset. - :paramtype stage: str - """ - super(DataVersionBaseProperties, self).__init__(**kwargs) - self.data_type = 'DataVersionBaseProperties' # type: str - self.data_uri = kwargs['data_uri'] - self.intellectual_property = kwargs.get('intellectual_property', None) - self.stage = kwargs.get('stage', None) - - -class DataImport(DataVersionBaseProperties): - """DataImport. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :vartype is_archived: bool - :ivar data_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "uri_file", "uri_folder", "mltable". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType - :ivar data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. - :vartype data_uri: str - :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual - Property. - :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar stage: Stage in the data lifecycle assigned to this data asset. - :vartype stage: str - :ivar asset_name: Name of the asset for data import job to create. - :vartype asset_name: str - :ivar source: Source data of the asset to import from. - :vartype source: ~azure.mgmt.machinelearningservices.models.DataImportSource - """ - - _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 'stage', 'type': 'str'}, - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'source': {'key': 'source', 'type': 'DataImportSource'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. 
Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :paramtype is_archived: bool - :keyword data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. - :paramtype data_uri: str - :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual - Property. - :paramtype intellectual_property: - ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword stage: Stage in the data lifecycle assigned to this data asset. - :paramtype stage: str - :keyword asset_name: Name of the asset for data import job to create. - :paramtype asset_name: str - :keyword source: Source data of the asset to import from. - :paramtype source: ~azure.mgmt.machinelearningservices.models.DataImportSource - """ - super(DataImport, self).__init__(**kwargs) - self.data_type = 'uri_folder' # type: str - self.asset_name = kwargs.get('asset_name', None) - self.source = kwargs.get('source', None) - - -class DataLakeAnalyticsSchema(msrest.serialization.Model): - """DataLakeAnalyticsSchema. - - :ivar properties: - :vartype properties: - ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties - """ - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsSchemaProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: - :paramtype properties: - ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties - """ - super(DataLakeAnalyticsSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class DataLakeAnalytics(Compute, DataLakeAnalyticsSchema): - """A DataLakeAnalytics compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar properties: - :vartype properties: - ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar compute_location: Location for the underlying compute. - :vartype compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar description: The description of the Machine Learning compute. - :vartype description: str - :ivar created_on: The time at which the compute was created. 
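# --- Illustrative usage sketch (editorial note, not part of the patch) ---
# DataImport extends DataVersionBaseProperties: the base requires data_uri, and the
# import-specific fields name the target asset and the source to pull from; the
# constructor pins data_type to 'uri_folder'. Import path and values are assumptions:
from azure.ai.ml._restclient.v2023_08_01_preview import models

data_import = models.DataImport(
    data_uri="azureml://datastores/workspaceblobstore/paths/imports/sales/",  # placeholder URI
    asset_name="sales-import",
    source=models.DatabaseSource(
        connection="azureml:my_sql_connection",   # hypothetical connection name
        query="SELECT * FROM dbo.sales",
    ),
)
# data_import.data_type == 'uri_folder' is set by the constructor.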
- :vartype created_on: ~datetime.datetime - :ivar modified_on: The time at which the compute was last modified. - :vartype modified_on: ~datetime.datetime - :ivar resource_id: ARM resource id of the underlying compute. - :vartype resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI - and AAD exclusively for authentication. - :vartype disable_local_auth: bool - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsSchemaProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: - :paramtype properties: - ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties - :keyword compute_location: Location for the underlying compute. - :paramtype compute_location: str - :keyword description: The description of the Machine Learning compute. - :paramtype description: str - :keyword resource_id: ARM resource id of the underlying compute. - :paramtype resource_id: str - :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only - MSI and AAD exclusively for authentication. - :paramtype disable_local_auth: bool - """ - super(DataLakeAnalytics, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'DataLakeAnalytics' # type: str - self.compute_location = kwargs.get('compute_location', None) - self.provisioning_state = None - self.description = kwargs.get('description', None) - self.created_on = None - self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) - self.provisioning_errors = None - self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) - - -class DataLakeAnalyticsSchemaProperties(msrest.serialization.Model): - """DataLakeAnalyticsSchemaProperties. - - :ivar data_lake_store_account_name: DataLake Store Account Name. - :vartype data_lake_store_account_name: str - """ - - _attribute_map = { - 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword data_lake_store_account_name: DataLake Store Account Name. 
- :paramtype data_lake_store_account_name: str - """ - super(DataLakeAnalyticsSchemaProperties, self).__init__(**kwargs) - self.data_lake_store_account_name = kwargs.get('data_lake_store_account_name', None) - - -class DataPathAssetReference(AssetReferenceBase): - """Reference to an asset via its path in a datastore. - - All required parameters must be populated in order to send to Azure. - - :ivar reference_type: Required. [Required] Specifies the type of asset reference.Constant - filled by server. Possible values include: "Id", "DataPath", "OutputPath". - :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType - :ivar datastore_id: ARM resource ID of the datastore where the asset is located. - :vartype datastore_id: str - :ivar path: The path of the file/directory in the datastore. - :vartype path: str - """ - - _validation = { - 'reference_type': {'required': True}, - } - - _attribute_map = { - 'reference_type': {'key': 'referenceType', 'type': 'str'}, - 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword datastore_id: ARM resource ID of the datastore where the asset is located. - :paramtype datastore_id: str - :keyword path: The path of the file/directory in the datastore. - :paramtype path: str - """ - super(DataPathAssetReference, self).__init__(**kwargs) - self.reference_type = 'DataPath' # type: str - self.datastore_id = kwargs.get('datastore_id', None) - self.path = kwargs.get('path', None) - - -class DataQualityMonitoringSignal(MonitoringSignalBase): - """DataQualityMonitoringSignal. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", - "Enabled". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". - :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType - :ivar feature_data_type_override: A dictionary that maps feature names to their respective data - types. - :vartype feature_data_type_override: dict[str, str or - ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType] - :ivar features: The features to calculate drift over. - :vartype features: ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterBase - :ivar metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. - :vartype metric_thresholds: - list[~azure.mgmt.machinelearningservices.models.DataQualityMetricThresholdBase] - :ivar production_data: Required. [Required] The data produced by the production service which - drift will be calculated for. - :vartype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :ivar reference_data: Required. [Required] The data to calculate drift against. 
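# --- Illustrative usage sketch (editorial note, not part of the patch) ---
# DataPathAssetReference identifies an asset by datastore plus relative path; its
# reference_type discriminator is fixed to 'DataPath' by the constructor. Import path
# and the IDs below are assumptions:
from azure.ai.ml._restclient.v2023_08_01_preview import models

ref = models.DataPathAssetReference(
    datastore_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.MachineLearningServices/workspaces/<ws>/datastores/workspaceblobstore",  # placeholder ARM ID
    path="models/classifier/1/model.pkl",
)
assert ref.reference_type == "DataPath"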
- :vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - """ - - _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'feature_data_type_override': {'key': 'featureDataTypeOverride', 'type': '{str}'}, - 'features': {'key': 'features', 'type': 'MonitoringFeatureFilterBase'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[DataQualityMetricThresholdBase]'}, - 'production_data': {'key': 'productionData', 'type': 'MonitoringInputDataBase'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword feature_data_type_override: A dictionary that maps feature names to their respective - data types. - :paramtype feature_data_type_override: dict[str, str or - ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType] - :keyword features: The features to calculate drift over. - :paramtype features: ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterBase - :keyword metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. - :paramtype metric_thresholds: - list[~azure.mgmt.machinelearningservices.models.DataQualityMetricThresholdBase] - :keyword production_data: Required. [Required] The data produced by the production service - which drift will be calculated for. - :paramtype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :keyword reference_data: Required. [Required] The data to calculate drift against. - :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - """ - super(DataQualityMonitoringSignal, self).__init__(**kwargs) - self.signal_type = 'DataQuality' # type: str - self.feature_data_type_override = kwargs.get('feature_data_type_override', None) - self.features = kwargs.get('features', None) - self.metric_thresholds = kwargs['metric_thresholds'] - self.production_data = kwargs['production_data'] - self.reference_data = kwargs['reference_data'] - - -class DatasetExportSummary(ExportSummary): - """DatasetExportSummary. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar end_date_time: The time when the export was completed. - :vartype end_date_time: ~datetime.datetime - :ivar exported_row_count: The total number of labeled datapoints exported. - :vartype exported_row_count: long - :ivar format: Required. [Required] The format of exported labels, also as the - discriminator.Constant filled by server. Possible values include: "Dataset", "Coco", "CSV". - :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType - :ivar labeling_job_id: Name and identifier of the job containing exported labels. 
- :vartype labeling_job_id: str - :ivar start_date_time: The time when the export was requested. - :vartype start_date_time: ~datetime.datetime - :ivar labeled_asset_name: The unique name of the labeled data asset. - :vartype labeled_asset_name: str - """ - - _validation = { - 'end_date_time': {'readonly': True}, - 'exported_row_count': {'readonly': True}, - 'format': {'required': True}, - 'labeling_job_id': {'readonly': True}, - 'start_date_time': {'readonly': True}, - 'labeled_asset_name': {'readonly': True}, - } - - _attribute_map = { - 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, - 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, - 'format': {'key': 'format', 'type': 'str'}, - 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, - 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, - 'labeled_asset_name': {'key': 'labeledAssetName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(DatasetExportSummary, self).__init__(**kwargs) - self.format = 'Dataset' # type: str - self.labeled_asset_name = None - - -class Datastore(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.DatastoreProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'DatastoreProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.DatastoreProperties - """ - super(Datastore, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class DatastoreResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of Datastore entities. - - :ivar next_link: The link to the next page of Datastore objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type Datastore. - :vartype value: list[~azure.mgmt.machinelearningservices.models.Datastore] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Datastore]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of Datastore objects. 
If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type Datastore. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.Datastore] - """ - super(DatastoreResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class DataVersionBase(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.DataVersionBaseProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'DataVersionBaseProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.DataVersionBaseProperties - """ - super(DataVersionBase, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class DataVersionBaseResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of DataVersionBase entities. - - :ivar next_link: The link to the next page of DataVersionBase objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type DataVersionBase. - :vartype value: list[~azure.mgmt.machinelearningservices.models.DataVersionBase] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[DataVersionBase]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of DataVersionBase objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type DataVersionBase. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.DataVersionBase] - """ - super(DataVersionBaseResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class OnlineScaleSettings(msrest.serialization.Model): - """Online deployment scaling configuration. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DefaultScaleSettings, TargetUtilizationScaleSettings. 
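# --- Illustrative pagination sketch (editorial note, not part of the patch) ---
# The *ResourceArmPaginatedResult models above all share one contract: `value` holds
# one page of items and `next_link` is None on the last page. A generic sketch, where
# `fetch_page` is a hypothetical callable returning such a paginated result:
def iterate_pages(fetch_page):
    """Yield every item across all pages of an ARM paginated result."""
    next_link = None
    while True:
        page = fetch_page(next_link)          # e.g. wraps a DatastoreResourceArmPaginatedResult
        for item in page.value or []:
            yield item
        next_link = page.next_link
        if next_link is None:                 # no further pages
            break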
- - All required parameters must be populated in order to send to Azure. - - :ivar scale_type: Required. [Required] Type of deployment scaling algorithm.Constant filled by - server. Possible values include: "Default", "TargetUtilization". - :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.ScaleType - """ - - _validation = { - 'scale_type': {'required': True}, - } - - _attribute_map = { - 'scale_type': {'key': 'scaleType', 'type': 'str'}, - } - - _subtype_map = { - 'scale_type': {'Default': 'DefaultScaleSettings', 'TargetUtilization': 'TargetUtilizationScaleSettings'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(OnlineScaleSettings, self).__init__(**kwargs) - self.scale_type = None # type: Optional[str] - - -class DefaultScaleSettings(OnlineScaleSettings): - """DefaultScaleSettings. - - All required parameters must be populated in order to send to Azure. - - :ivar scale_type: Required. [Required] Type of deployment scaling algorithm.Constant filled by - server. Possible values include: "Default", "TargetUtilization". - :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.ScaleType - """ - - _validation = { - 'scale_type': {'required': True}, - } - - _attribute_map = { - 'scale_type': {'key': 'scaleType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(DefaultScaleSettings, self).__init__(**kwargs) - self.scale_type = 'Default' # type: str - - -class DeploymentLogs(msrest.serialization.Model): - """DeploymentLogs. - - :ivar content: The retrieved online deployment logs. - :vartype content: str - """ - - _attribute_map = { - 'content': {'key': 'content', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword content: The retrieved online deployment logs. - :paramtype content: str - """ - super(DeploymentLogs, self).__init__(**kwargs) - self.content = kwargs.get('content', None) - - -class DeploymentLogsRequest(msrest.serialization.Model): - """DeploymentLogsRequest. - - :ivar container_type: The type of container to retrieve logs from. Possible values include: - "StorageInitializer", "InferenceServer", "ModelDataCollector". - :vartype container_type: str or ~azure.mgmt.machinelearningservices.models.ContainerType - :ivar tail: The maximum number of lines to tail. - :vartype tail: int - """ - - _attribute_map = { - 'container_type': {'key': 'containerType', 'type': 'str'}, - 'tail': {'key': 'tail', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword container_type: The type of container to retrieve logs from. Possible values include: - "StorageInitializer", "InferenceServer", "ModelDataCollector". - :paramtype container_type: str or ~azure.mgmt.machinelearningservices.models.ContainerType - :keyword tail: The maximum number of lines to tail. - :paramtype tail: int - """ - super(DeploymentLogsRequest, self).__init__(**kwargs) - self.container_type = kwargs.get('container_type', None) - self.tail = kwargs.get('tail', None) - - -class ResourceConfiguration(msrest.serialization.Model): - """ResourceConfiguration. - - :ivar instance_count: Optional number of instances or nodes used by the compute target. - :vartype instance_count: int - :ivar instance_type: Optional type of VM used as supported by the compute target. - :vartype instance_type: str - :ivar locations: Locations where the job can run. - :vartype locations: list[str] - :ivar max_instance_count: Optional max allowed number of instances or nodes to be used by the - compute target. 
- For use with elastic training, currently supported by PyTorch distribution type only. - :vartype max_instance_count: int - :ivar properties: Additional properties bag. - :vartype properties: dict[str, any] - """ - - _attribute_map = { - 'instance_count': {'key': 'instanceCount', 'type': 'int'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'locations': {'key': 'locations', 'type': '[str]'}, - 'max_instance_count': {'key': 'maxInstanceCount', 'type': 'int'}, - 'properties': {'key': 'properties', 'type': '{object}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword instance_count: Optional number of instances or nodes used by the compute target. - :paramtype instance_count: int - :keyword instance_type: Optional type of VM used as supported by the compute target. - :paramtype instance_type: str - :keyword locations: Locations where the job can run. - :paramtype locations: list[str] - :keyword max_instance_count: Optional max allowed number of instances or nodes to be used by - the compute target. - For use with elastic training, currently supported by PyTorch distribution type only. - :paramtype max_instance_count: int - :keyword properties: Additional properties bag. - :paramtype properties: dict[str, any] - """ - super(ResourceConfiguration, self).__init__(**kwargs) - self.instance_count = kwargs.get('instance_count', 1) - self.instance_type = kwargs.get('instance_type', None) - self.locations = kwargs.get('locations', None) - self.max_instance_count = kwargs.get('max_instance_count', None) - self.properties = kwargs.get('properties', None) - - -class DeploymentResourceConfiguration(ResourceConfiguration): - """DeploymentResourceConfiguration. - - :ivar instance_count: Optional number of instances or nodes used by the compute target. - :vartype instance_count: int - :ivar instance_type: Optional type of VM used as supported by the compute target. - :vartype instance_type: str - :ivar locations: Locations where the job can run. - :vartype locations: list[str] - :ivar max_instance_count: Optional max allowed number of instances or nodes to be used by the - compute target. - For use with elastic training, currently supported by PyTorch distribution type only. - :vartype max_instance_count: int - :ivar properties: Additional properties bag. - :vartype properties: dict[str, any] - """ - - _attribute_map = { - 'instance_count': {'key': 'instanceCount', 'type': 'int'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'locations': {'key': 'locations', 'type': '[str]'}, - 'max_instance_count': {'key': 'maxInstanceCount', 'type': 'int'}, - 'properties': {'key': 'properties', 'type': '{object}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword instance_count: Optional number of instances or nodes used by the compute target. - :paramtype instance_count: int - :keyword instance_type: Optional type of VM used as supported by the compute target. - :paramtype instance_type: str - :keyword locations: Locations where the job can run. - :paramtype locations: list[str] - :keyword max_instance_count: Optional max allowed number of instances or nodes to be used by - the compute target. - For use with elastic training, currently supported by PyTorch distribution type only. - :paramtype max_instance_count: int - :keyword properties: Additional properties bag. 
- :paramtype properties: dict[str, any] - """ - super(DeploymentResourceConfiguration, self).__init__(**kwargs) - - -class DiagnoseRequestProperties(msrest.serialization.Model): - """DiagnoseRequestProperties. - - :ivar application_insights: Setting for diagnosing dependent application insights. - :vartype application_insights: dict[str, any] - :ivar container_registry: Setting for diagnosing dependent container registry. - :vartype container_registry: dict[str, any] - :ivar dns_resolution: Setting for diagnosing dns resolution. - :vartype dns_resolution: dict[str, any] - :ivar key_vault: Setting for diagnosing dependent key vault. - :vartype key_vault: dict[str, any] - :ivar nsg: Setting for diagnosing network security group. - :vartype nsg: dict[str, any] - :ivar others: Setting for diagnosing unclassified category of problems. - :vartype others: dict[str, any] - :ivar resource_lock: Setting for diagnosing resource lock. - :vartype resource_lock: dict[str, any] - :ivar storage_account: Setting for diagnosing dependent storage account. - :vartype storage_account: dict[str, any] - :ivar udr: Setting for diagnosing user defined routing. - :vartype udr: dict[str, any] - """ - - _attribute_map = { - 'application_insights': {'key': 'applicationInsights', 'type': '{object}'}, - 'container_registry': {'key': 'containerRegistry', 'type': '{object}'}, - 'dns_resolution': {'key': 'dnsResolution', 'type': '{object}'}, - 'key_vault': {'key': 'keyVault', 'type': '{object}'}, - 'nsg': {'key': 'nsg', 'type': '{object}'}, - 'others': {'key': 'others', 'type': '{object}'}, - 'resource_lock': {'key': 'resourceLock', 'type': '{object}'}, - 'storage_account': {'key': 'storageAccount', 'type': '{object}'}, - 'udr': {'key': 'udr', 'type': '{object}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword application_insights: Setting for diagnosing dependent application insights. - :paramtype application_insights: dict[str, any] - :keyword container_registry: Setting for diagnosing dependent container registry. - :paramtype container_registry: dict[str, any] - :keyword dns_resolution: Setting for diagnosing dns resolution. - :paramtype dns_resolution: dict[str, any] - :keyword key_vault: Setting for diagnosing dependent key vault. - :paramtype key_vault: dict[str, any] - :keyword nsg: Setting for diagnosing network security group. - :paramtype nsg: dict[str, any] - :keyword others: Setting for diagnosing unclassified category of problems. - :paramtype others: dict[str, any] - :keyword resource_lock: Setting for diagnosing resource lock. - :paramtype resource_lock: dict[str, any] - :keyword storage_account: Setting for diagnosing dependent storage account. - :paramtype storage_account: dict[str, any] - :keyword udr: Setting for diagnosing user defined routing. - :paramtype udr: dict[str, any] - """ - super(DiagnoseRequestProperties, self).__init__(**kwargs) - self.application_insights = kwargs.get('application_insights', None) - self.container_registry = kwargs.get('container_registry', None) - self.dns_resolution = kwargs.get('dns_resolution', None) - self.key_vault = kwargs.get('key_vault', None) - self.nsg = kwargs.get('nsg', None) - self.others = kwargs.get('others', None) - self.resource_lock = kwargs.get('resource_lock', None) - self.storage_account = kwargs.get('storage_account', None) - self.udr = kwargs.get('udr', None) - - -class DiagnoseResponseResult(msrest.serialization.Model): - """DiagnoseResponseResult. 
- - :ivar value: - :vartype value: ~azure.mgmt.machinelearningservices.models.DiagnoseResponseResultValue - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': 'DiagnoseResponseResultValue'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword value: - :paramtype value: ~azure.mgmt.machinelearningservices.models.DiagnoseResponseResultValue - """ - super(DiagnoseResponseResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class DiagnoseResponseResultValue(msrest.serialization.Model): - """DiagnoseResponseResultValue. - - :ivar user_defined_route_results: - :vartype user_defined_route_results: - list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :ivar network_security_rule_results: - :vartype network_security_rule_results: - list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :ivar resource_lock_results: - :vartype resource_lock_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :ivar dns_resolution_results: - :vartype dns_resolution_results: - list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :ivar storage_account_results: - :vartype storage_account_results: - list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :ivar key_vault_results: - :vartype key_vault_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :ivar container_registry_results: - :vartype container_registry_results: - list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :ivar application_insights_results: - :vartype application_insights_results: - list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :ivar other_results: - :vartype other_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - """ - - _attribute_map = { - 'user_defined_route_results': {'key': 'userDefinedRouteResults', 'type': '[DiagnoseResult]'}, - 'network_security_rule_results': {'key': 'networkSecurityRuleResults', 'type': '[DiagnoseResult]'}, - 'resource_lock_results': {'key': 'resourceLockResults', 'type': '[DiagnoseResult]'}, - 'dns_resolution_results': {'key': 'dnsResolutionResults', 'type': '[DiagnoseResult]'}, - 'storage_account_results': {'key': 'storageAccountResults', 'type': '[DiagnoseResult]'}, - 'key_vault_results': {'key': 'keyVaultResults', 'type': '[DiagnoseResult]'}, - 'container_registry_results': {'key': 'containerRegistryResults', 'type': '[DiagnoseResult]'}, - 'application_insights_results': {'key': 'applicationInsightsResults', 'type': '[DiagnoseResult]'}, - 'other_results': {'key': 'otherResults', 'type': '[DiagnoseResult]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword user_defined_route_results: - :paramtype user_defined_route_results: - list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :keyword network_security_rule_results: - :paramtype network_security_rule_results: - list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :keyword resource_lock_results: - :paramtype resource_lock_results: - list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :keyword dns_resolution_results: - :paramtype dns_resolution_results: - list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :keyword storage_account_results: - :paramtype storage_account_results: - list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :keyword key_vault_results: - :paramtype key_vault_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :keyword container_registry_results: - 
:paramtype container_registry_results: - list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :keyword application_insights_results: - :paramtype application_insights_results: - list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - :keyword other_results: - :paramtype other_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] - """ - super(DiagnoseResponseResultValue, self).__init__(**kwargs) - self.user_defined_route_results = kwargs.get('user_defined_route_results', None) - self.network_security_rule_results = kwargs.get('network_security_rule_results', None) - self.resource_lock_results = kwargs.get('resource_lock_results', None) - self.dns_resolution_results = kwargs.get('dns_resolution_results', None) - self.storage_account_results = kwargs.get('storage_account_results', None) - self.key_vault_results = kwargs.get('key_vault_results', None) - self.container_registry_results = kwargs.get('container_registry_results', None) - self.application_insights_results = kwargs.get('application_insights_results', None) - self.other_results = kwargs.get('other_results', None) - - -class DiagnoseResult(msrest.serialization.Model): - """Result of Diagnose. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar code: Code for workspace setup error. - :vartype code: str - :ivar level: Level of workspace setup error. Possible values include: "Warning", "Error", - "Information". - :vartype level: str or ~azure.mgmt.machinelearningservices.models.DiagnoseResultLevel - :ivar message: Message of workspace setup error. - :vartype message: str - """ - - _validation = { - 'code': {'readonly': True}, - 'level': {'readonly': True}, - 'message': {'readonly': True}, - } - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(DiagnoseResult, self).__init__(**kwargs) - self.code = None - self.level = None - self.message = None - - -class DiagnoseWorkspaceParameters(msrest.serialization.Model): - """Parameters to diagnose a workspace. - - :ivar value: - :vartype value: ~azure.mgmt.machinelearningservices.models.DiagnoseRequestProperties - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': 'DiagnoseRequestProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword value: - :paramtype value: ~azure.mgmt.machinelearningservices.models.DiagnoseRequestProperties - """ - super(DiagnoseWorkspaceParameters, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class DistributionConfiguration(msrest.serialization.Model): - """Base definition for job distribution configuration. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: Mpi, PyTorch, Ray, TensorFlow. - - All required parameters must be populated in order to send to Azure. - - :ivar distribution_type: Required. [Required] Specifies the type of distribution - framework.Constant filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi", - "Ray". 
- :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType - """ - - _validation = { - 'distribution_type': {'required': True}, - } - - _attribute_map = { - 'distribution_type': {'key': 'distributionType', 'type': 'str'}, - } - - _subtype_map = { - 'distribution_type': {'Mpi': 'Mpi', 'PyTorch': 'PyTorch', 'Ray': 'Ray', 'TensorFlow': 'TensorFlow'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(DistributionConfiguration, self).__init__(**kwargs) - self.distribution_type = None # type: Optional[str] - - -class Docker(msrest.serialization.Model): - """Docker. - - :ivar additional_properties: Unmatched properties from the message are deserialized to this - collection. - :vartype additional_properties: dict[str, any] - :ivar privileged: Indicate whether container shall run in privileged or non-privileged mode. - :vartype privileged: bool - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'privileged': {'key': 'privileged', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword additional_properties: Unmatched properties from the message are deserialized to this - collection. - :paramtype additional_properties: dict[str, any] - :keyword privileged: Indicate whether container shall run in privileged or non-privileged mode. - :paramtype privileged: bool - """ - super(Docker, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.privileged = kwargs.get('privileged', None) - - -class EmailMonitoringAlertNotificationSettings(MonitoringAlertNotificationSettingsBase): - """EmailMonitoringAlertNotificationSettings. - - All required parameters must be populated in order to send to Azure. - - :ivar alert_notification_type: Required. [Required] Specifies the type of signal to - monitor.Constant filled by server. Possible values include: "AzureMonitor", "Email". - :vartype alert_notification_type: str or - ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationType - :ivar email_notification_setting: Configuration for notification. - :vartype email_notification_setting: - ~azure.mgmt.machinelearningservices.models.NotificationSetting - """ - - _validation = { - 'alert_notification_type': {'required': True}, - } - - _attribute_map = { - 'alert_notification_type': {'key': 'alertNotificationType', 'type': 'str'}, - 'email_notification_setting': {'key': 'emailNotificationSetting', 'type': 'NotificationSetting'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword email_notification_setting: Configuration for notification. - :paramtype email_notification_setting: - ~azure.mgmt.machinelearningservices.models.NotificationSetting - """ - super(EmailMonitoringAlertNotificationSettings, self).__init__(**kwargs) - self.alert_notification_type = 'Email' # type: str - self.email_notification_setting = kwargs.get('email_notification_setting', None) - - -class EncryptionKeyVaultUpdateProperties(msrest.serialization.Model): - """EncryptionKeyVaultUpdateProperties. - - All required parameters must be populated in order to send to Azure. - - :ivar key_identifier: Required. - :vartype key_identifier: str - """ - - _validation = { - 'key_identifier': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword key_identifier: Required. 
- :paramtype key_identifier: str - """ - super(EncryptionKeyVaultUpdateProperties, self).__init__(**kwargs) - self.key_identifier = kwargs['key_identifier'] - - -class EncryptionProperty(msrest.serialization.Model): - """EncryptionProperty. - - All required parameters must be populated in order to send to Azure. - - :ivar cosmos_db_resource_id: The byok cosmosdb account that customer brings to store customer's - data - with encryption. - :vartype cosmos_db_resource_id: str - :ivar identity: Identity to be used with the keyVault. - :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityForCmk - :ivar key_vault_properties: Required. KeyVault details to do the encryption. - :vartype key_vault_properties: ~azure.mgmt.machinelearningservices.models.KeyVaultProperties - :ivar search_account_resource_id: The byok search account that customer brings to store - customer's data - with encryption. - :vartype search_account_resource_id: str - :ivar status: Required. Indicates whether or not the encryption is enabled for the workspace. - Possible values include: "Enabled", "Disabled". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.EncryptionStatus - :ivar storage_account_resource_id: The byok storage account that customer brings to store - customer's data - with encryption. - :vartype storage_account_resource_id: str - """ - - _validation = { - 'key_vault_properties': {'required': True}, - 'status': {'required': True}, - } - - _attribute_map = { - 'cosmos_db_resource_id': {'key': 'cosmosDbResourceId', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityForCmk'}, - 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'}, - 'search_account_resource_id': {'key': 'searchAccountResourceId', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'storage_account_resource_id': {'key': 'storageAccountResourceId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword cosmos_db_resource_id: The byok cosmosdb account that customer brings to store - customer's data - with encryption. - :paramtype cosmos_db_resource_id: str - :keyword identity: Identity to be used with the keyVault. - :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityForCmk - :keyword key_vault_properties: Required. KeyVault details to do the encryption. - :paramtype key_vault_properties: ~azure.mgmt.machinelearningservices.models.KeyVaultProperties - :keyword search_account_resource_id: The byok search account that customer brings to store - customer's data - with encryption. - :paramtype search_account_resource_id: str - :keyword status: Required. Indicates whether or not the encryption is enabled for the - workspace. Possible values include: "Enabled", "Disabled". - :paramtype status: str or ~azure.mgmt.machinelearningservices.models.EncryptionStatus - :keyword storage_account_resource_id: The byok storage account that customer brings to store - customer's data - with encryption. 
- :paramtype storage_account_resource_id: str - """ - super(EncryptionProperty, self).__init__(**kwargs) - self.cosmos_db_resource_id = kwargs.get('cosmos_db_resource_id', None) - self.identity = kwargs.get('identity', None) - self.key_vault_properties = kwargs['key_vault_properties'] - self.search_account_resource_id = kwargs.get('search_account_resource_id', None) - self.status = kwargs['status'] - self.storage_account_resource_id = kwargs.get('storage_account_resource_id', None) - - -class EncryptionUpdateProperties(msrest.serialization.Model): - """EncryptionUpdateProperties. - - All required parameters must be populated in order to send to Azure. - - :ivar key_vault_properties: Required. - :vartype key_vault_properties: - ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultUpdateProperties - """ - - _validation = { - 'key_vault_properties': {'required': True}, - } - - _attribute_map = { - 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'EncryptionKeyVaultUpdateProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword key_vault_properties: Required. - :paramtype key_vault_properties: - ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultUpdateProperties - """ - super(EncryptionUpdateProperties, self).__init__(**kwargs) - self.key_vault_properties = kwargs['key_vault_properties'] - - -class Endpoint(msrest.serialization.Model): - """Endpoint. - - :ivar protocol: Protocol over which communication will happen over this endpoint. Possible - values include: "tcp", "udp", "http". Default value: "tcp". - :vartype protocol: str or ~azure.mgmt.machinelearningservices.models.Protocol - :ivar name: Name of the Endpoint. - :vartype name: str - :ivar target: Application port inside the container. - :vartype target: int - :ivar published: Port over which the application is exposed from container. - :vartype published: int - :ivar host_ip: Host IP over which the application is exposed from the container. - :vartype host_ip: str - """ - - _attribute_map = { - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'int'}, - 'published': {'key': 'published', 'type': 'int'}, - 'host_ip': {'key': 'hostIp', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword protocol: Protocol over which communication will happen over this endpoint. Possible - values include: "tcp", "udp", "http". Default value: "tcp". - :paramtype protocol: str or ~azure.mgmt.machinelearningservices.models.Protocol - :keyword name: Name of the Endpoint. - :paramtype name: str - :keyword target: Application port inside the container. - :paramtype target: int - :keyword published: Port over which the application is exposed from container. - :paramtype published: int - :keyword host_ip: Host IP over which the application is exposed from the container. - :paramtype host_ip: str - """ - super(Endpoint, self).__init__(**kwargs) - self.protocol = kwargs.get('protocol', "tcp") - self.name = kwargs.get('name', None) - self.target = kwargs.get('target', None) - self.published = kwargs.get('published', None) - self.host_ip = kwargs.get('host_ip', None) - - -class EndpointAuthKeys(msrest.serialization.Model): - """Keys for endpoint authentication. - - :ivar primary_key: The primary key. - :vartype primary_key: str - :ivar secondary_key: The secondary key. 
- :vartype secondary_key: str - """ - - _attribute_map = { - 'primary_key': {'key': 'primaryKey', 'type': 'str'}, - 'secondary_key': {'key': 'secondaryKey', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword primary_key: The primary key. - :paramtype primary_key: str - :keyword secondary_key: The secondary key. - :paramtype secondary_key: str - """ - super(EndpointAuthKeys, self).__init__(**kwargs) - self.primary_key = kwargs.get('primary_key', None) - self.secondary_key = kwargs.get('secondary_key', None) - - -class EndpointAuthToken(msrest.serialization.Model): - """Service Token. - - :ivar access_token: Access token for endpoint authentication. - :vartype access_token: str - :ivar expiry_time_utc: Access token expiry time (UTC). - :vartype expiry_time_utc: long - :ivar refresh_after_time_utc: Refresh access token after time (UTC). - :vartype refresh_after_time_utc: long - :ivar token_type: Access token type. - :vartype token_type: str - """ - - _attribute_map = { - 'access_token': {'key': 'accessToken', 'type': 'str'}, - 'expiry_time_utc': {'key': 'expiryTimeUtc', 'type': 'long'}, - 'refresh_after_time_utc': {'key': 'refreshAfterTimeUtc', 'type': 'long'}, - 'token_type': {'key': 'tokenType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword access_token: Access token for endpoint authentication. - :paramtype access_token: str - :keyword expiry_time_utc: Access token expiry time (UTC). - :paramtype expiry_time_utc: long - :keyword refresh_after_time_utc: Refresh access token after time (UTC). - :paramtype refresh_after_time_utc: long - :keyword token_type: Access token type. - :paramtype token_type: str - """ - super(EndpointAuthToken, self).__init__(**kwargs) - self.access_token = kwargs.get('access_token', None) - self.expiry_time_utc = kwargs.get('expiry_time_utc', 0) - self.refresh_after_time_utc = kwargs.get('refresh_after_time_utc', 0) - self.token_type = kwargs.get('token_type', None) - - -class EndpointScheduleAction(ScheduleActionBase): - """EndpointScheduleAction. - - All required parameters must be populated in order to send to Azure. - - :ivar action_type: Required. [Required] Specifies the action type of the schedule.Constant - filled by server. Possible values include: "CreateJob", "InvokeBatchEndpoint", "ImportData", - "CreateMonitor". - :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType - :ivar endpoint_invocation_definition: Required. [Required] Defines Schedule action definition - details. - - - .. raw:: html - - . - :vartype endpoint_invocation_definition: any - """ - - _validation = { - 'action_type': {'required': True}, - 'endpoint_invocation_definition': {'required': True}, - } - - _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, - 'endpoint_invocation_definition': {'key': 'endpointInvocationDefinition', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword endpoint_invocation_definition: Required. [Required] Defines Schedule action - definition details. - - - .. raw:: html - - . - :paramtype endpoint_invocation_definition: any - """ - super(EndpointScheduleAction, self).__init__(**kwargs) - self.action_type = 'InvokeBatchEndpoint' # type: str - self.endpoint_invocation_definition = kwargs['endpoint_invocation_definition'] - - -class EnvironmentContainer(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. 
- - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentContainerProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'EnvironmentContainerProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: - ~azure.mgmt.machinelearningservices.models.EnvironmentContainerProperties - """ - super(EnvironmentContainer, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class EnvironmentContainerProperties(AssetContainer): - """Container for environment specification versions. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar latest_version: The latest version inside this container. - :vartype latest_version: str - :ivar next_version: The next auto incremental version. - :vartype next_version: str - :ivar provisioning_state: Provisioning state for the environment container. Possible values - include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState - """ - - _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. 
Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - """ - super(EnvironmentContainerProperties, self).__init__(**kwargs) - self.provisioning_state = None - - -class EnvironmentContainerResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of EnvironmentContainer entities. - - :ivar next_link: The link to the next page of EnvironmentContainer objects. If null, there are - no additional pages. - :vartype next_link: str - :ivar value: An array of objects of type EnvironmentContainer. - :vartype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[EnvironmentContainer]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of EnvironmentContainer objects. If null, there - are no additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type EnvironmentContainer. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] - """ - super(EnvironmentContainerResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class EnvironmentVariable(msrest.serialization.Model): - """EnvironmentVariable. - - :ivar additional_properties: Unmatched properties from the message are deserialized to this - collection. - :vartype additional_properties: dict[str, any] - :ivar type: Type of the Environment Variable. Possible values are: local - For local variable. - Possible values include: "local". Default value: "local". - :vartype type: str or ~azure.mgmt.machinelearningservices.models.EnvironmentVariableType - :ivar value: Value of the Environment variable. - :vartype value: str - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword additional_properties: Unmatched properties from the message are deserialized to this - collection. - :paramtype additional_properties: dict[str, any] - :keyword type: Type of the Environment Variable. Possible values are: local - For local - variable. Possible values include: "local". Default value: "local". - :paramtype type: str or ~azure.mgmt.machinelearningservices.models.EnvironmentVariableType - :keyword value: Value of the Environment variable. - :paramtype value: str - """ - super(EnvironmentVariable, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', "local") - self.value = kwargs.get('value', None) - - -class EnvironmentVersion(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". 
- :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentVersionProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'EnvironmentVersionProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentVersionProperties - """ - super(EnvironmentVersion, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class EnvironmentVersionProperties(AssetBase): - """Environment version details. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :vartype is_archived: bool - :ivar auto_rebuild: Defines if image needs to be rebuilt based on base image changes. Possible - values include: "Disabled", "OnBaseImageUpdate". - :vartype auto_rebuild: str or ~azure.mgmt.machinelearningservices.models.AutoRebuildSetting - :ivar build: Configuration settings for Docker build context. - :vartype build: ~azure.mgmt.machinelearningservices.models.BuildContext - :ivar conda_file: Standard configuration file used by Conda that lets you install any kind of - package, including Python, R, and C/C++ packages. - - - .. raw:: html - - . - :vartype conda_file: str - :ivar environment_type: Environment type is either user managed or curated by the Azure ML - service - - - .. raw:: html - - . Possible values include: "Curated", "UserCreated". - :vartype environment_type: str or ~azure.mgmt.machinelearningservices.models.EnvironmentType - :ivar image: Name of the image that will be used for the environment. - - - .. raw:: html - - . - :vartype image: str - :ivar inference_config: Defines configuration specific to inference. - :vartype inference_config: - ~azure.mgmt.machinelearningservices.models.InferenceContainerProperties - :ivar intellectual_property: Intellectual Property details. Used if environment is an - Intellectual Property. 
- :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar os_type: The OS type of the environment. Possible values include: "Linux", "Windows". - :vartype os_type: str or ~azure.mgmt.machinelearningservices.models.OperatingSystemType - :ivar provisioning_state: Provisioning state for the environment version. Possible values - include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState - :ivar stage: Stage in the environment lifecycle assigned to this environment. - :vartype stage: str - """ - - _validation = { - 'environment_type': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'auto_rebuild': {'key': 'autoRebuild', 'type': 'str'}, - 'build': {'key': 'build', 'type': 'BuildContext'}, - 'conda_file': {'key': 'condaFile', 'type': 'str'}, - 'environment_type': {'key': 'environmentType', 'type': 'str'}, - 'image': {'key': 'image', 'type': 'str'}, - 'inference_config': {'key': 'inferenceConfig', 'type': 'InferenceContainerProperties'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'os_type': {'key': 'osType', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'stage': {'key': 'stage', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :paramtype is_archived: bool - :keyword auto_rebuild: Defines if image needs to be rebuilt based on base image changes. - Possible values include: "Disabled", "OnBaseImageUpdate". - :paramtype auto_rebuild: str or ~azure.mgmt.machinelearningservices.models.AutoRebuildSetting - :keyword build: Configuration settings for Docker build context. - :paramtype build: ~azure.mgmt.machinelearningservices.models.BuildContext - :keyword conda_file: Standard configuration file used by Conda that lets you install any kind - of package, including Python, R, and C/C++ packages. - - - .. raw:: html - - . - :paramtype conda_file: str - :keyword image: Name of the image that will be used for the environment. - - - .. raw:: html - - . - :paramtype image: str - :keyword inference_config: Defines configuration specific to inference. 
- :paramtype inference_config: - ~azure.mgmt.machinelearningservices.models.InferenceContainerProperties - :keyword intellectual_property: Intellectual Property details. Used if environment is an - Intellectual Property. - :paramtype intellectual_property: - ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword os_type: The OS type of the environment. Possible values include: "Linux", "Windows". - :paramtype os_type: str or ~azure.mgmt.machinelearningservices.models.OperatingSystemType - :keyword stage: Stage in the environment lifecycle assigned to this environment. - :paramtype stage: str - """ - super(EnvironmentVersionProperties, self).__init__(**kwargs) - self.auto_rebuild = kwargs.get('auto_rebuild', None) - self.build = kwargs.get('build', None) - self.conda_file = kwargs.get('conda_file', None) - self.environment_type = None - self.image = kwargs.get('image', None) - self.inference_config = kwargs.get('inference_config', None) - self.intellectual_property = kwargs.get('intellectual_property', None) - self.os_type = kwargs.get('os_type', None) - self.provisioning_state = None - self.stage = kwargs.get('stage', None) - - -class EnvironmentVersionResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of EnvironmentVersion entities. - - :ivar next_link: The link to the next page of EnvironmentVersion objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type EnvironmentVersion. - :vartype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[EnvironmentVersion]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of EnvironmentVersion objects. If null, there are - no additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type EnvironmentVersion. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] - """ - super(EnvironmentVersionResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class ErrorAdditionalInfo(msrest.serialization.Model): - """The resource management error additional info. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar type: The additional info type. - :vartype type: str - :ivar info: The additional info. - :vartype info: any - """ - - _validation = { - 'type': {'readonly': True}, - 'info': {'readonly': True}, - } - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'info': {'key': 'info', 'type': 'object'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ErrorAdditionalInfo, self).__init__(**kwargs) - self.type = None - self.info = None - - -class ErrorDetail(msrest.serialization.Model): - """The error detail. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar code: The error code. - :vartype code: str - :ivar message: The error message. - :vartype message: str - :ivar target: The error target. - :vartype target: str - :ivar details: The error details. - :vartype details: list[~azure.mgmt.machinelearningservices.models.ErrorDetail] - :ivar additional_info: The error additional info. 
- :vartype additional_info: list[~azure.mgmt.machinelearningservices.models.ErrorAdditionalInfo] - """ - - _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, - 'target': {'readonly': True}, - 'details': {'readonly': True}, - 'additional_info': {'readonly': True}, - } - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetail]'}, - 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ErrorDetail, self).__init__(**kwargs) - self.code = None - self.message = None - self.target = None - self.details = None - self.additional_info = None - - -class ErrorResponse(msrest.serialization.Model): - """Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.). - - :ivar error: The error object. - :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorDetail - """ - - _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorDetail'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword error: The error object. - :paramtype error: ~azure.mgmt.machinelearningservices.models.ErrorDetail - """ - super(ErrorResponse, self).__init__(**kwargs) - self.error = kwargs.get('error', None) - - -class EstimatedVMPrice(msrest.serialization.Model): - """The estimated price info for using a VM of a particular OS type, tier, etc. - - All required parameters must be populated in order to send to Azure. - - :ivar retail_price: Required. The price charged for using the VM. - :vartype retail_price: float - :ivar os_type: Required. Operating system type used by the VM. Possible values include: - "Linux", "Windows". - :vartype os_type: str or ~azure.mgmt.machinelearningservices.models.VMPriceOSType - :ivar vm_tier: Required. The type of the VM. Possible values include: "Standard", - "LowPriority", "Spot". - :vartype vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier - """ - - _validation = { - 'retail_price': {'required': True}, - 'os_type': {'required': True}, - 'vm_tier': {'required': True}, - } - - _attribute_map = { - 'retail_price': {'key': 'retailPrice', 'type': 'float'}, - 'os_type': {'key': 'osType', 'type': 'str'}, - 'vm_tier': {'key': 'vmTier', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword retail_price: Required. The price charged for using the VM. - :paramtype retail_price: float - :keyword os_type: Required. Operating system type used by the VM. Possible values include: - "Linux", "Windows". - :paramtype os_type: str or ~azure.mgmt.machinelearningservices.models.VMPriceOSType - :keyword vm_tier: Required. The type of the VM. Possible values include: "Standard", - "LowPriority", "Spot". - :paramtype vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier - """ - super(EstimatedVMPrice, self).__init__(**kwargs) - self.retail_price = kwargs['retail_price'] - self.os_type = kwargs['os_type'] - self.vm_tier = kwargs['vm_tier'] - - -class EstimatedVMPrices(msrest.serialization.Model): - """The estimated price info for using a VM. - - All required parameters must be populated in order to send to Azure. - - :ivar billing_currency: Required. Three lettered code specifying the currency of the VM price. - Example: USD. Possible values include: "USD". 
- :vartype billing_currency: str or ~azure.mgmt.machinelearningservices.models.BillingCurrency - :ivar unit_of_measure: Required. The unit of time measurement for the specified VM price. - Example: OneHour. Possible values include: "OneHour". - :vartype unit_of_measure: str or ~azure.mgmt.machinelearningservices.models.UnitOfMeasure - :ivar values: Required. The list of estimated prices for using a VM of a particular OS type, - tier, etc. - :vartype values: list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice] - """ - - _validation = { - 'billing_currency': {'required': True}, - 'unit_of_measure': {'required': True}, - 'values': {'required': True}, - } - - _attribute_map = { - 'billing_currency': {'key': 'billingCurrency', 'type': 'str'}, - 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[EstimatedVMPrice]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword billing_currency: Required. Three lettered code specifying the currency of the VM - price. Example: USD. Possible values include: "USD". - :paramtype billing_currency: str or ~azure.mgmt.machinelearningservices.models.BillingCurrency - :keyword unit_of_measure: Required. The unit of time measurement for the specified VM price. - Example: OneHour. Possible values include: "OneHour". - :paramtype unit_of_measure: str or ~azure.mgmt.machinelearningservices.models.UnitOfMeasure - :keyword values: Required. The list of estimated prices for using a VM of a particular OS type, - tier, etc. - :paramtype values: list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice] - """ - super(EstimatedVMPrices, self).__init__(**kwargs) - self.billing_currency = kwargs['billing_currency'] - self.unit_of_measure = kwargs['unit_of_measure'] - self.values = kwargs['values'] - - -class ExternalFQDNResponse(msrest.serialization.Model): - """ExternalFQDNResponse. - - :ivar value: - :vartype value: list[~azure.mgmt.machinelearningservices.models.FQDNEndpointsPropertyBag] - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[FQDNEndpointsPropertyBag]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword value: - :paramtype value: list[~azure.mgmt.machinelearningservices.models.FQDNEndpointsPropertyBag] - """ - super(ExternalFQDNResponse, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class Feature(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. 
- :vartype properties: ~azure.mgmt.machinelearningservices.models.FeatureProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeatureProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.FeatureProperties - """ - super(Feature, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class FeatureAttributionDriftMonitoringSignal(MonitoringSignalBase): - """FeatureAttributionDriftMonitoringSignal. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", - "Enabled". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". - :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType - :ivar metric_threshold: Required. [Required] A list of metrics to calculate and their - associated thresholds. - :vartype metric_threshold: - ~azure.mgmt.machinelearningservices.models.FeatureAttributionMetricThreshold - :ivar production_data: Required. [Required] The data which drift will be calculated for. - :vartype production_data: - list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] - :ivar reference_data: Required. [Required] The data to calculate drift against. - :vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - """ - - _validation = { - 'signal_type': {'required': True}, - 'metric_threshold': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'metric_threshold': {'key': 'metricThreshold', 'type': 'FeatureAttributionMetricThreshold'}, - 'production_data': {'key': 'productionData', 'type': '[MonitoringInputDataBase]'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword metric_threshold: Required. [Required] A list of metrics to calculate and their - associated thresholds. 
- :paramtype metric_threshold: - ~azure.mgmt.machinelearningservices.models.FeatureAttributionMetricThreshold - :keyword production_data: Required. [Required] The data which drift will be calculated for. - :paramtype production_data: - list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] - :keyword reference_data: Required. [Required] The data to calculate drift against. - :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - """ - super(FeatureAttributionDriftMonitoringSignal, self).__init__(**kwargs) - self.signal_type = 'FeatureAttributionDrift' # type: str - self.metric_threshold = kwargs['metric_threshold'] - self.production_data = kwargs['production_data'] - self.reference_data = kwargs['reference_data'] - - -class FeatureAttributionMetricThreshold(msrest.serialization.Model): - """FeatureAttributionMetricThreshold. - - All required parameters must be populated in order to send to Azure. - - :ivar metric: Required. [Required] The feature attribution metric to calculate. Possible values - include: "NormalizedDiscountedCumulativeGain". - :vartype metric: str or ~azure.mgmt.machinelearningservices.models.FeatureAttributionMetric - :ivar threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - - _validation = { - 'metric': {'required': True}, - } - - _attribute_map = { - 'metric': {'key': 'metric', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword metric: Required. [Required] The feature attribution metric to calculate. Possible - values include: "NormalizedDiscountedCumulativeGain". - :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.FeatureAttributionMetric - :keyword threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - super(FeatureAttributionMetricThreshold, self).__init__(**kwargs) - self.metric = kwargs['metric'] - self.threshold = kwargs.get('threshold', None) - - -class FeatureProperties(ResourceBase): - """Dto object representing feature. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar data_type: Specifies type. Possible values include: "String", "Integer", "Long", "Float", - "Double", "Binary", "Datetime", "Boolean". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.FeatureDataType - :ivar feature_name: Specifies name. - :vartype feature_name: str - """ - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'feature_name': {'key': 'featureName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. 
- :paramtype tags: dict[str, str] - :keyword data_type: Specifies type. Possible values include: "String", "Integer", "Long", - "Float", "Double", "Binary", "Datetime", "Boolean". - :paramtype data_type: str or ~azure.mgmt.machinelearningservices.models.FeatureDataType - :keyword feature_name: Specifies name. - :paramtype feature_name: str - """ - super(FeatureProperties, self).__init__(**kwargs) - self.data_type = kwargs.get('data_type', None) - self.feature_name = kwargs.get('feature_name', None) - - -class FeatureResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of Feature entities. - - :ivar next_link: The link to the next page of Feature objects. If null, there are no additional - pages. - :vartype next_link: str - :ivar value: An array of objects of type Feature. - :vartype value: list[~azure.mgmt.machinelearningservices.models.Feature] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Feature]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of Feature objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type Feature. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.Feature] - """ - super(FeatureResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class FeaturesetContainer(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.FeaturesetContainerProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeaturesetContainerProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.FeaturesetContainerProperties - """ - super(FeaturesetContainer, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class FeaturesetContainerProperties(AssetContainer): - """Dto object representing feature set. - - Variables are only populated by the server, and will be ignored when sending a request. 
- - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar latest_version: The latest version inside this container. - :vartype latest_version: str - :ivar next_version: The next auto incremental version. - :vartype next_version: str - :ivar provisioning_state: Provisioning state for the featureset container. Possible values - include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState - """ - - _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - """ - super(FeaturesetContainerProperties, self).__init__(**kwargs) - self.provisioning_state = None - - -class FeaturesetContainerResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of FeaturesetContainer entities. - - :ivar next_link: The link to the next page of FeaturesetContainer objects. If null, there are - no additional pages. - :vartype next_link: str - :ivar value: An array of objects of type FeaturesetContainer. - :vartype value: list[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[FeaturesetContainer]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of FeaturesetContainer objects. If null, there - are no additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type FeaturesetContainer. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] - """ - super(FeaturesetContainerResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class FeaturesetSpecification(msrest.serialization.Model): - """Dto object representing specification. - - :ivar path: Specifies the spec path. - :vartype path: str - """ - - _attribute_map = { - 'path': {'key': 'path', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword path: Specifies the spec path. 
- :paramtype path: str - """ - super(FeaturesetSpecification, self).__init__(**kwargs) - self.path = kwargs.get('path', None) - - -class FeaturesetVersion(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeaturesetVersionProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionProperties - """ - super(FeaturesetVersion, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class FeaturesetVersionBackfillRequest(msrest.serialization.Model): - """Request payload for creating a backfill request for a given feature set version. - - :ivar data_availability_status: Specified the data availability status that you want to - backfill. - :vartype data_availability_status: list[str or - ~azure.mgmt.machinelearningservices.models.DataAvailabilityStatus] - :ivar description: Specifies description. - :vartype description: str - :ivar display_name: Specifies description. - :vartype display_name: str - :ivar feature_window: Specifies the backfill feature window to be materialized. - :vartype feature_window: ~azure.mgmt.machinelearningservices.models.FeatureWindow - :ivar job_id: Specify the jobId to retry the failed materialization. - :vartype job_id: str - :ivar properties: Specifies the properties. - :vartype properties: dict[str, str] - :ivar resource: Specifies the compute resource settings. - :vartype resource: ~azure.mgmt.machinelearningservices.models.MaterializationComputeResource - :ivar spark_configuration: Specifies the spark compute settings. - :vartype spark_configuration: dict[str, str] - :ivar tags: A set of tags. Specifies the tags. 
- :vartype tags: dict[str, str] - """ - - _attribute_map = { - 'data_availability_status': {'key': 'dataAvailabilityStatus', 'type': '[str]'}, - 'description': {'key': 'description', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'feature_window': {'key': 'featureWindow', 'type': 'FeatureWindow'}, - 'job_id': {'key': 'jobId', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'resource': {'key': 'resource', 'type': 'MaterializationComputeResource'}, - 'spark_configuration': {'key': 'sparkConfiguration', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword data_availability_status: Specified the data availability status that you want to - backfill. - :paramtype data_availability_status: list[str or - ~azure.mgmt.machinelearningservices.models.DataAvailabilityStatus] - :keyword description: Specifies description. - :paramtype description: str - :keyword display_name: Specifies description. - :paramtype display_name: str - :keyword feature_window: Specifies the backfill feature window to be materialized. - :paramtype feature_window: ~azure.mgmt.machinelearningservices.models.FeatureWindow - :keyword job_id: Specify the jobId to retry the failed materialization. - :paramtype job_id: str - :keyword properties: Specifies the properties. - :paramtype properties: dict[str, str] - :keyword resource: Specifies the compute resource settings. - :paramtype resource: ~azure.mgmt.machinelearningservices.models.MaterializationComputeResource - :keyword spark_configuration: Specifies the spark compute settings. - :paramtype spark_configuration: dict[str, str] - :keyword tags: A set of tags. Specifies the tags. - :paramtype tags: dict[str, str] - """ - super(FeaturesetVersionBackfillRequest, self).__init__(**kwargs) - self.data_availability_status = kwargs.get('data_availability_status', None) - self.description = kwargs.get('description', None) - self.display_name = kwargs.get('display_name', None) - self.feature_window = kwargs.get('feature_window', None) - self.job_id = kwargs.get('job_id', None) - self.properties = kwargs.get('properties', None) - self.resource = kwargs.get('resource', None) - self.spark_configuration = kwargs.get('spark_configuration', None) - self.tags = kwargs.get('tags', None) - - -class FeaturesetVersionBackfillResponse(msrest.serialization.Model): - """Response payload for creating a backfill request for a given feature set version. - - :ivar job_ids: List of jobs submitted as part of the backfill request. - :vartype job_ids: list[str] - """ - - _attribute_map = { - 'job_ids': {'key': 'jobIds', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword job_ids: List of jobs submitted as part of the backfill request. - :paramtype job_ids: list[str] - """ - super(FeaturesetVersionBackfillResponse, self).__init__(**kwargs) - self.job_ids = kwargs.get('job_ids', None) - - -class FeaturesetVersionProperties(AssetBase): - """Dto object representing feature set version. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. 
- :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :vartype is_archived: bool - :ivar entities: Specifies list of entities. - :vartype entities: list[str] - :ivar materialization_settings: Specifies the materialization settings. - :vartype materialization_settings: - ~azure.mgmt.machinelearningservices.models.MaterializationSettings - :ivar provisioning_state: Provisioning state for the featureset version container. Possible - values include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState - :ivar specification: Specifies the feature spec details. - :vartype specification: ~azure.mgmt.machinelearningservices.models.FeaturesetSpecification - :ivar stage: Specifies the asset stage. - :vartype stage: str - """ - - _validation = { - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'entities': {'key': 'entities', 'type': '[str]'}, - 'materialization_settings': {'key': 'materializationSettings', 'type': 'MaterializationSettings'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'specification': {'key': 'specification', 'type': 'FeaturesetSpecification'}, - 'stage': {'key': 'stage', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :paramtype is_archived: bool - :keyword entities: Specifies list of entities. - :paramtype entities: list[str] - :keyword materialization_settings: Specifies the materialization settings. - :paramtype materialization_settings: - ~azure.mgmt.machinelearningservices.models.MaterializationSettings - :keyword specification: Specifies the feature spec details. - :paramtype specification: ~azure.mgmt.machinelearningservices.models.FeaturesetSpecification - :keyword stage: Specifies the asset stage. 
- :paramtype stage: str - """ - super(FeaturesetVersionProperties, self).__init__(**kwargs) - self.entities = kwargs.get('entities', None) - self.materialization_settings = kwargs.get('materialization_settings', None) - self.provisioning_state = None - self.specification = kwargs.get('specification', None) - self.stage = kwargs.get('stage', None) - - -class FeaturesetVersionResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of FeaturesetVersion entities. - - :ivar next_link: The link to the next page of FeaturesetVersion objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type FeaturesetVersion. - :vartype value: list[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[FeaturesetVersion]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of FeaturesetVersion objects. If null, there are - no additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type FeaturesetVersion. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] - """ - super(FeaturesetVersionResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class FeaturestoreEntityContainer(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: - ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainerProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeaturestoreEntityContainerProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: - ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainerProperties - """ - super(FeaturestoreEntityContainer, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class FeaturestoreEntityContainerProperties(AssetContainer): - """Dto object representing feature entity. - - Variables are only populated by the server, and will be ignored when sending a request. 
- - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar latest_version: The latest version inside this container. - :vartype latest_version: str - :ivar next_version: The next auto incremental version. - :vartype next_version: str - :ivar provisioning_state: Provisioning state for the featurestore entity container. Possible - values include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState - """ - - _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - """ - super(FeaturestoreEntityContainerProperties, self).__init__(**kwargs) - self.provisioning_state = None - - -class FeaturestoreEntityContainerResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of FeaturestoreEntityContainer entities. - - :ivar next_link: The link to the next page of FeaturestoreEntityContainer objects. If null, - there are no additional pages. - :vartype next_link: str - :ivar value: An array of objects of type FeaturestoreEntityContainer. - :vartype value: list[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[FeaturestoreEntityContainer]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of FeaturestoreEntityContainer objects. If null, - there are no additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type FeaturestoreEntityContainer. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] - """ - super(FeaturestoreEntityContainerResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class FeaturestoreEntityVersion(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. 
Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: - ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersionProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeaturestoreEntityVersionProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: - ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersionProperties - """ - super(FeaturestoreEntityVersion, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class FeaturestoreEntityVersionProperties(AssetBase): - """Dto object representing feature entity version. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :vartype is_archived: bool - :ivar index_columns: Specifies index columns. - :vartype index_columns: list[~azure.mgmt.machinelearningservices.models.IndexColumn] - :ivar provisioning_state: Provisioning state for the featurestore entity version. Possible - values include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState - :ivar stage: Specifies the asset stage. 
- :vartype stage: str - """ - - _validation = { - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'index_columns': {'key': 'indexColumns', 'type': '[IndexColumn]'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'stage': {'key': 'stage', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :paramtype is_archived: bool - :keyword index_columns: Specifies index columns. - :paramtype index_columns: list[~azure.mgmt.machinelearningservices.models.IndexColumn] - :keyword stage: Specifies the asset stage. - :paramtype stage: str - """ - super(FeaturestoreEntityVersionProperties, self).__init__(**kwargs) - self.index_columns = kwargs.get('index_columns', None) - self.provisioning_state = None - self.stage = kwargs.get('stage', None) - - -class FeaturestoreEntityVersionResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of FeaturestoreEntityVersion entities. - - :ivar next_link: The link to the next page of FeaturestoreEntityVersion objects. If null, there - are no additional pages. - :vartype next_link: str - :ivar value: An array of objects of type FeaturestoreEntityVersion. - :vartype value: list[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[FeaturestoreEntityVersion]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of FeaturestoreEntityVersion objects. If null, - there are no additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type FeaturestoreEntityVersion. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] - """ - super(FeaturestoreEntityVersionResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class FeatureStoreSettings(msrest.serialization.Model): - """FeatureStoreSettings. 
- - :ivar compute_runtime: - :vartype compute_runtime: ~azure.mgmt.machinelearningservices.models.ComputeRuntimeDto - :ivar offline_store_connection_name: - :vartype offline_store_connection_name: str - :ivar online_store_connection_name: - :vartype online_store_connection_name: str - """ - - _attribute_map = { - 'compute_runtime': {'key': 'computeRuntime', 'type': 'ComputeRuntimeDto'}, - 'offline_store_connection_name': {'key': 'offlineStoreConnectionName', 'type': 'str'}, - 'online_store_connection_name': {'key': 'onlineStoreConnectionName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword compute_runtime: - :paramtype compute_runtime: ~azure.mgmt.machinelearningservices.models.ComputeRuntimeDto - :keyword offline_store_connection_name: - :paramtype offline_store_connection_name: str - :keyword online_store_connection_name: - :paramtype online_store_connection_name: str - """ - super(FeatureStoreSettings, self).__init__(**kwargs) - self.compute_runtime = kwargs.get('compute_runtime', None) - self.offline_store_connection_name = kwargs.get('offline_store_connection_name', None) - self.online_store_connection_name = kwargs.get('online_store_connection_name', None) - - -class FeatureSubset(MonitoringFeatureFilterBase): - """FeatureSubset. - - All required parameters must be populated in order to send to Azure. - - :ivar filter_type: Required. [Required] Specifies the feature filter to leverage when selecting - features to calculate metrics over.Constant filled by server. Possible values include: - "AllFeatures", "TopNByAttribution", "FeatureSubset". - :vartype filter_type: str or - ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterType - :ivar features: Required. [Required] The list of features to include. - :vartype features: list[str] - """ - - _validation = { - 'filter_type': {'required': True}, - 'features': {'required': True}, - } - - _attribute_map = { - 'filter_type': {'key': 'filterType', 'type': 'str'}, - 'features': {'key': 'features', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword features: Required. [Required] The list of features to include. - :paramtype features: list[str] - """ - super(FeatureSubset, self).__init__(**kwargs) - self.filter_type = 'FeatureSubset' # type: str - self.features = kwargs['features'] - - -class FeatureWindow(msrest.serialization.Model): - """Specifies the feature window. - - :ivar feature_window_end: Specifies the feature window end time. - :vartype feature_window_end: ~datetime.datetime - :ivar feature_window_start: Specifies the feature window start time. - :vartype feature_window_start: ~datetime.datetime - """ - - _attribute_map = { - 'feature_window_end': {'key': 'featureWindowEnd', 'type': 'iso-8601'}, - 'feature_window_start': {'key': 'featureWindowStart', 'type': 'iso-8601'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword feature_window_end: Specifies the feature window end time. - :paramtype feature_window_end: ~datetime.datetime - :keyword feature_window_start: Specifies the feature window start time. - :paramtype feature_window_start: ~datetime.datetime - """ - super(FeatureWindow, self).__init__(**kwargs) - self.feature_window_end = kwargs.get('feature_window_end', None) - self.feature_window_start = kwargs.get('feature_window_start', None) - - -class FeaturizationSettings(msrest.serialization.Model): - """Featurization Configuration. - - :ivar dataset_language: Dataset language, useful for the text data. 
- :vartype dataset_language: str - """ - - _attribute_map = { - 'dataset_language': {'key': 'datasetLanguage', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword dataset_language: Dataset language, useful for the text data. - :paramtype dataset_language: str - """ - super(FeaturizationSettings, self).__init__(**kwargs) - self.dataset_language = kwargs.get('dataset_language', None) - - -class FileSystemSource(DataImportSource): - """FileSystemSource. - - All required parameters must be populated in order to send to Azure. - - :ivar connection: Workspace connection for data import source storage. - :vartype connection: str - :ivar source_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "database", "file_system". - :vartype source_type: str or ~azure.mgmt.machinelearningservices.models.DataImportSourceType - :ivar path: Path on data import FileSystem source. - :vartype path: str - """ - - _validation = { - 'source_type': {'required': True}, - } - - _attribute_map = { - 'connection': {'key': 'connection', 'type': 'str'}, - 'source_type': {'key': 'sourceType', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword connection: Workspace connection for data import source storage. - :paramtype connection: str - :keyword path: Path on data import FileSystem source. - :paramtype path: str - """ - super(FileSystemSource, self).__init__(**kwargs) - self.source_type = 'file_system' # type: str - self.path = kwargs.get('path', None) - - -class MonitoringInputDataBase(msrest.serialization.Model): - """Monitoring input data base definition. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FixedInputData, StaticInputData, TrailingInputData. - - All required parameters must be populated in order to send to Azure. - - :ivar columns: Mapping of column names to special uses. - :vartype columns: dict[str, str] - :ivar data_context: The context metadata of the data source. - :vartype data_context: str - :ivar input_data_type: Required. [Required] Specifies the type of signal to monitor.Constant - filled by server. Possible values include: "Static", "Trailing", "Fixed". - :vartype input_data_type: str or - ~azure.mgmt.machinelearningservices.models.MonitoringInputDataType - :ivar job_input_type: Required. [Required] Specifies the type of job. Possible values include: - "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", "triton_model". - :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str - """ - - _validation = { - 'input_data_type': {'required': True}, - 'job_input_type': {'required': True}, - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'columns': {'key': 'columns', 'type': '{str}'}, - 'data_context': {'key': 'dataContext', 'type': 'str'}, - 'input_data_type': {'key': 'inputDataType', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - } - - _subtype_map = { - 'input_data_type': {'Fixed': 'FixedInputData', 'Static': 'StaticInputData', 'Trailing': 'TrailingInputData'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword columns: Mapping of column names to special uses. 
- :paramtype columns: dict[str, str] - :keyword data_context: The context metadata of the data source. - :paramtype data_context: str - :keyword job_input_type: Required. [Required] Specifies the type of job. Possible values - include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", - "triton_model". - :paramtype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str - """ - super(MonitoringInputDataBase, self).__init__(**kwargs) - self.columns = kwargs.get('columns', None) - self.data_context = kwargs.get('data_context', None) - self.input_data_type = None # type: Optional[str] - self.job_input_type = kwargs['job_input_type'] - self.uri = kwargs['uri'] - - -class FixedInputData(MonitoringInputDataBase): - """Fixed input data definition. - - All required parameters must be populated in order to send to Azure. - - :ivar columns: Mapping of column names to special uses. - :vartype columns: dict[str, str] - :ivar data_context: The context metadata of the data source. - :vartype data_context: str - :ivar input_data_type: Required. [Required] Specifies the type of signal to monitor.Constant - filled by server. Possible values include: "Static", "Trailing", "Fixed". - :vartype input_data_type: str or - ~azure.mgmt.machinelearningservices.models.MonitoringInputDataType - :ivar job_input_type: Required. [Required] Specifies the type of job. Possible values include: - "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", "triton_model". - :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str - """ - - _validation = { - 'input_data_type': {'required': True}, - 'job_input_type': {'required': True}, - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'columns': {'key': 'columns', 'type': '{str}'}, - 'data_context': {'key': 'dataContext', 'type': 'str'}, - 'input_data_type': {'key': 'inputDataType', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword columns: Mapping of column names to special uses. - :paramtype columns: dict[str, str] - :keyword data_context: The context metadata of the data source. - :paramtype data_context: str - :keyword job_input_type: Required. [Required] Specifies the type of job. Possible values - include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", - "triton_model". - :paramtype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str - """ - super(FixedInputData, self).__init__(**kwargs) - self.input_data_type = 'Fixed' # type: str - - -class FlavorData(msrest.serialization.Model): - """FlavorData. - - :ivar data: Model flavor-specific data. - :vartype data: dict[str, str] - """ - - _attribute_map = { - 'data': {'key': 'data', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword data: Model flavor-specific data. - :paramtype data: dict[str, str] - """ - super(FlavorData, self).__init__(**kwargs) - self.data = kwargs.get('data', None) - - -class Forecasting(AutoMLVertical, TableVertical): - """Forecasting task in AutoML Table vertical. 
- - All required parameters must be populated in order to send to Azure. - - :ivar cv_split_column_names: Columns to use for CVSplit data. - :vartype cv_split_column_names: list[str] - :ivar featurization_settings: Featurization inputs needed for AutoML job. - :vartype featurization_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings - :ivar fixed_parameters: Model/training parameters that will remain constant throughout - training. - :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters - :ivar limit_settings: Execution constraints for AutoMLJob. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings - :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset - when validation dataset is not provided. - :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] - :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings - :ivar test_data: Test data input. - :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation - purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype test_data_size: float - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar weight_column_name: The name of the sample weight column. Automated ML supports a - weighted column as an input, causing rows in the data to be weighted up or down. - :vartype weight_column_name: str - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar forecasting_settings: Forecasting task specific inputs. - :vartype forecasting_settings: ~azure.mgmt.machinelearningservices.models.ForecastingSettings - :ivar primary_metric: Primary metric for forecasting task. 
Possible values include: - "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", - "NormalizedMeanAbsoluteError". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ForecastingPrimaryMetrics - :ivar training_settings: Inputs for training phase for an AutoML Job. - :vartype training_settings: - ~azure.mgmt.machinelearningservices.models.ForecastingTrainingSettings - """ - - _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, - } - - _attribute_map = { - 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'TableFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, - 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, - 'search_space': {'key': 'searchSpace', 'type': '[TableParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'TableSweepSettings'}, - 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, - 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'forecasting_settings': {'key': 'forecastingSettings', 'type': 'ForecastingSettings'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - 'training_settings': {'key': 'trainingSettings', 'type': 'ForecastingTrainingSettings'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword cv_split_column_names: Columns to use for CVSplit data. - :paramtype cv_split_column_names: list[str] - :keyword featurization_settings: Featurization inputs needed for AutoML job. - :paramtype featurization_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings - :keyword fixed_parameters: Model/training parameters that will remain constant throughout - training. - :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters - :keyword limit_settings: Execution constraints for AutoMLJob. - :paramtype limit_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings - :keyword n_cross_validations: Number of cross validation folds to be applied on training - dataset - when validation dataset is not provided. - :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] - :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings - :keyword test_data: Test data input. - :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation - purpose. 
- Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype test_data_size: float - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword weight_column_name: The name of the sample weight column. Automated ML supports a - weighted column as an input, causing rows in the data to be weighted up or down. - :paramtype weight_column_name: str - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword forecasting_settings: Forecasting task specific inputs. - :paramtype forecasting_settings: ~azure.mgmt.machinelearningservices.models.ForecastingSettings - :keyword primary_metric: Primary metric for forecasting task. Possible values include: - "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", - "NormalizedMeanAbsoluteError". - :paramtype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ForecastingPrimaryMetrics - :keyword training_settings: Inputs for training phase for an AutoML Job. - :paramtype training_settings: - ~azure.mgmt.machinelearningservices.models.ForecastingTrainingSettings - """ - super(Forecasting, self).__init__(**kwargs) - self.cv_split_column_names = kwargs.get('cv_split_column_names', None) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.n_cross_validations = kwargs.get('n_cross_validations', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.test_data = kwargs.get('test_data', None) - self.test_data_size = kwargs.get('test_data_size', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.weight_column_name = kwargs.get('weight_column_name', None) - self.task_type = 'Forecasting' # type: str - self.forecasting_settings = kwargs.get('forecasting_settings', None) - self.primary_metric = kwargs.get('primary_metric', None) - self.training_settings = kwargs.get('training_settings', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] - - -class ForecastingSettings(msrest.serialization.Model): - """Forecasting specific parameters. - - :ivar country_or_region_for_holidays: Country or region for holidays for forecasting tasks. - These should be ISO 3166 two-letter country/region codes, for example 'US' or 'GB'. 
- :vartype country_or_region_for_holidays: str - :ivar cv_step_size: Number of periods between the origin time of one CV fold and the next fold. - For - example, if ``CVStepSize`` = 3 for daily data, the origin time for each fold will be - three days apart. - :vartype cv_step_size: int - :ivar feature_lags: Flag for generating lags for the numeric features with 'auto' or null. - Possible values include: "None", "Auto". - :vartype feature_lags: str or ~azure.mgmt.machinelearningservices.models.FeatureLags - :ivar features_unknown_at_forecast_time: The feature columns that are available for training - but unknown at the time of forecast/inference. - If features_unknown_at_forecast_time is not set, it is assumed that all the feature columns in - the dataset are known at inference time. - :vartype features_unknown_at_forecast_time: list[str] - :ivar forecast_horizon: The desired maximum forecast horizon in units of time-series frequency. - :vartype forecast_horizon: ~azure.mgmt.machinelearningservices.models.ForecastHorizon - :ivar frequency: When forecasting, this parameter represents the period with which the forecast - is desired, for example daily, weekly, yearly, etc. The forecast frequency is dataset frequency - by default. - :vartype frequency: str - :ivar seasonality: Set time series seasonality as an integer multiple of the series frequency. - If seasonality is set to 'auto', it will be inferred. - :vartype seasonality: ~azure.mgmt.machinelearningservices.models.Seasonality - :ivar short_series_handling_config: The parameter defining how if AutoML should handle short - time series. Possible values include: "None", "Auto", "Pad", "Drop". - :vartype short_series_handling_config: str or - ~azure.mgmt.machinelearningservices.models.ShortSeriesHandlingConfiguration - :ivar target_aggregate_function: The function to be used to aggregate the time series target - column to conform to a user specified frequency. - If the TargetAggregateFunction is set i.e. not 'None', but the freq parameter is not set, the - error is raised. The possible target aggregation functions are: "sum", "max", "min" and "mean". - Possible values include: "None", "Sum", "Max", "Min", "Mean". - :vartype target_aggregate_function: str or - ~azure.mgmt.machinelearningservices.models.TargetAggregationFunction - :ivar target_lags: The number of past periods to lag from the target column. - :vartype target_lags: ~azure.mgmt.machinelearningservices.models.TargetLags - :ivar target_rolling_window_size: The number of past periods used to create a rolling window - average of the target column. - :vartype target_rolling_window_size: - ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSize - :ivar time_column_name: The name of the time column. This parameter is required when - forecasting to specify the datetime column in the input data used for building the time series - and inferring its frequency. - :vartype time_column_name: str - :ivar time_series_id_column_names: The names of columns used to group a timeseries. It can be - used to create multiple series. - If grain is not defined, the data set is assumed to be one time-series. This parameter is used - with task type forecasting. - :vartype time_series_id_column_names: list[str] - :ivar use_stl: Configure STL Decomposition of the time-series target column. Possible values - include: "None", "Season", "SeasonTrend". 
- :vartype use_stl: str or ~azure.mgmt.machinelearningservices.models.UseStl - """ - - _attribute_map = { - 'country_or_region_for_holidays': {'key': 'countryOrRegionForHolidays', 'type': 'str'}, - 'cv_step_size': {'key': 'cvStepSize', 'type': 'int'}, - 'feature_lags': {'key': 'featureLags', 'type': 'str'}, - 'features_unknown_at_forecast_time': {'key': 'featuresUnknownAtForecastTime', 'type': '[str]'}, - 'forecast_horizon': {'key': 'forecastHorizon', 'type': 'ForecastHorizon'}, - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'seasonality': {'key': 'seasonality', 'type': 'Seasonality'}, - 'short_series_handling_config': {'key': 'shortSeriesHandlingConfig', 'type': 'str'}, - 'target_aggregate_function': {'key': 'targetAggregateFunction', 'type': 'str'}, - 'target_lags': {'key': 'targetLags', 'type': 'TargetLags'}, - 'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'TargetRollingWindowSize'}, - 'time_column_name': {'key': 'timeColumnName', 'type': 'str'}, - 'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'}, - 'use_stl': {'key': 'useStl', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword country_or_region_for_holidays: Country or region for holidays for forecasting tasks. - These should be ISO 3166 two-letter country/region codes, for example 'US' or 'GB'. - :paramtype country_or_region_for_holidays: str - :keyword cv_step_size: Number of periods between the origin time of one CV fold and the next - fold. For - example, if ``CVStepSize`` = 3 for daily data, the origin time for each fold will be - three days apart. - :paramtype cv_step_size: int - :keyword feature_lags: Flag for generating lags for the numeric features with 'auto' or null. - Possible values include: "None", "Auto". - :paramtype feature_lags: str or ~azure.mgmt.machinelearningservices.models.FeatureLags - :keyword features_unknown_at_forecast_time: The feature columns that are available for training - but unknown at the time of forecast/inference. - If features_unknown_at_forecast_time is not set, it is assumed that all the feature columns in - the dataset are known at inference time. - :paramtype features_unknown_at_forecast_time: list[str] - :keyword forecast_horizon: The desired maximum forecast horizon in units of time-series - frequency. - :paramtype forecast_horizon: ~azure.mgmt.machinelearningservices.models.ForecastHorizon - :keyword frequency: When forecasting, this parameter represents the period with which the - forecast is desired, for example daily, weekly, yearly, etc. The forecast frequency is dataset - frequency by default. - :paramtype frequency: str - :keyword seasonality: Set time series seasonality as an integer multiple of the series - frequency. - If seasonality is set to 'auto', it will be inferred. - :paramtype seasonality: ~azure.mgmt.machinelearningservices.models.Seasonality - :keyword short_series_handling_config: The parameter defining how if AutoML should handle short - time series. Possible values include: "None", "Auto", "Pad", "Drop". - :paramtype short_series_handling_config: str or - ~azure.mgmt.machinelearningservices.models.ShortSeriesHandlingConfiguration - :keyword target_aggregate_function: The function to be used to aggregate the time series target - column to conform to a user specified frequency. - If the TargetAggregateFunction is set i.e. not 'None', but the freq parameter is not set, the - error is raised. The possible target aggregation functions are: "sum", "max", "min" and "mean". 
- Possible values include: "None", "Sum", "Max", "Min", "Mean". - :paramtype target_aggregate_function: str or - ~azure.mgmt.machinelearningservices.models.TargetAggregationFunction - :keyword target_lags: The number of past periods to lag from the target column. - :paramtype target_lags: ~azure.mgmt.machinelearningservices.models.TargetLags - :keyword target_rolling_window_size: The number of past periods used to create a rolling window - average of the target column. - :paramtype target_rolling_window_size: - ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSize - :keyword time_column_name: The name of the time column. This parameter is required when - forecasting to specify the datetime column in the input data used for building the time series - and inferring its frequency. - :paramtype time_column_name: str - :keyword time_series_id_column_names: The names of columns used to group a timeseries. It can - be used to create multiple series. - If grain is not defined, the data set is assumed to be one time-series. This parameter is used - with task type forecasting. - :paramtype time_series_id_column_names: list[str] - :keyword use_stl: Configure STL Decomposition of the time-series target column. Possible values - include: "None", "Season", "SeasonTrend". - :paramtype use_stl: str or ~azure.mgmt.machinelearningservices.models.UseStl - """ - super(ForecastingSettings, self).__init__(**kwargs) - self.country_or_region_for_holidays = kwargs.get('country_or_region_for_holidays', None) - self.cv_step_size = kwargs.get('cv_step_size', None) - self.feature_lags = kwargs.get('feature_lags', None) - self.features_unknown_at_forecast_time = kwargs.get('features_unknown_at_forecast_time', None) - self.forecast_horizon = kwargs.get('forecast_horizon', None) - self.frequency = kwargs.get('frequency', None) - self.seasonality = kwargs.get('seasonality', None) - self.short_series_handling_config = kwargs.get('short_series_handling_config', None) - self.target_aggregate_function = kwargs.get('target_aggregate_function', None) - self.target_lags = kwargs.get('target_lags', None) - self.target_rolling_window_size = kwargs.get('target_rolling_window_size', None) - self.time_column_name = kwargs.get('time_column_name', None) - self.time_series_id_column_names = kwargs.get('time_series_id_column_names', None) - self.use_stl = kwargs.get('use_stl', None) - - -class ForecastingTrainingSettings(TrainingSettings): - """Forecasting Training related configuration. - - :ivar enable_dnn_training: Enable recommendation of DNN models. - :vartype enable_dnn_training: bool - :ivar enable_model_explainability: Flag to turn on explainability on best model. - :vartype enable_model_explainability: bool - :ivar enable_onnx_compatible_models: Flag for enabling onnx compatible models. - :vartype enable_onnx_compatible_models: bool - :ivar enable_stack_ensemble: Enable stack ensemble run. - :vartype enable_stack_ensemble: bool - :ivar enable_vote_ensemble: Enable voting ensemble run. - :vartype enable_vote_ensemble: bool - :ivar ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model - generation, multiple fitted models from the previous child runs are downloaded. - Configure this parameter with a higher value than 300 secs, if more time is needed. - :vartype ensemble_model_download_timeout: ~datetime.timedelta - :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. 
- :vartype stack_ensemble_settings: - ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings - :ivar training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to - 'non-distributed' for now, however in the future may result in mixed mode or heuristics based - mode selection. Default is 'auto'. - If 'Distributed' then only distributed featurization is used and distributed algorithms are - chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". - :vartype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode - :ivar allowed_training_algorithms: Allowed models for forecasting task. - :vartype allowed_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.ForecastingModels] - :ivar blocked_training_algorithms: Blocked models for forecasting task. - :vartype blocked_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.ForecastingModels] - """ - - _attribute_map = { - 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, - 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, - 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, - 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, - 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, - 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, - 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, - 'training_mode': {'key': 'trainingMode', 'type': 'str'}, - 'allowed_training_algorithms': {'key': 'allowedTrainingAlgorithms', 'type': '[str]'}, - 'blocked_training_algorithms': {'key': 'blockedTrainingAlgorithms', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword enable_dnn_training: Enable recommendation of DNN models. - :paramtype enable_dnn_training: bool - :keyword enable_model_explainability: Flag to turn on explainability on best model. - :paramtype enable_model_explainability: bool - :keyword enable_onnx_compatible_models: Flag for enabling onnx compatible models. - :paramtype enable_onnx_compatible_models: bool - :keyword enable_stack_ensemble: Enable stack ensemble run. - :paramtype enable_stack_ensemble: bool - :keyword enable_vote_ensemble: Enable voting ensemble run. - :paramtype enable_vote_ensemble: bool - :keyword ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model - generation, multiple fitted models from the previous child runs are downloaded. - Configure this parameter with a higher value than 300 secs, if more time is needed. - :paramtype ensemble_model_download_timeout: ~datetime.timedelta - :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. - :paramtype stack_ensemble_settings: - ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings - :keyword training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to - 'non-distributed' for now, however in the future may result in mixed mode or heuristics based - mode selection. Default is 'auto'. - If 'Distributed' then only distributed featurization is used and distributed algorithms are - chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". 
- :paramtype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode - :keyword allowed_training_algorithms: Allowed models for forecasting task. - :paramtype allowed_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.ForecastingModels] - :keyword blocked_training_algorithms: Blocked models for forecasting task. - :paramtype blocked_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.ForecastingModels] - """ - super(ForecastingTrainingSettings, self).__init__(**kwargs) - self.allowed_training_algorithms = kwargs.get('allowed_training_algorithms', None) - self.blocked_training_algorithms = kwargs.get('blocked_training_algorithms', None) - - -class FQDNEndpoint(msrest.serialization.Model): - """FQDNEndpoint. - - :ivar domain_name: - :vartype domain_name: str - :ivar endpoint_details: - :vartype endpoint_details: list[~azure.mgmt.machinelearningservices.models.FQDNEndpointDetail] - """ - - _attribute_map = { - 'domain_name': {'key': 'domainName', 'type': 'str'}, - 'endpoint_details': {'key': 'endpointDetails', 'type': '[FQDNEndpointDetail]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword domain_name: - :paramtype domain_name: str - :keyword endpoint_details: - :paramtype endpoint_details: - list[~azure.mgmt.machinelearningservices.models.FQDNEndpointDetail] - """ - super(FQDNEndpoint, self).__init__(**kwargs) - self.domain_name = kwargs.get('domain_name', None) - self.endpoint_details = kwargs.get('endpoint_details', None) - - -class FQDNEndpointDetail(msrest.serialization.Model): - """FQDNEndpointDetail. - - :ivar port: - :vartype port: int - """ - - _attribute_map = { - 'port': {'key': 'port', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword port: - :paramtype port: int - """ - super(FQDNEndpointDetail, self).__init__(**kwargs) - self.port = kwargs.get('port', None) - - -class FQDNEndpoints(msrest.serialization.Model): - """FQDNEndpoints. - - :ivar category: - :vartype category: str - :ivar endpoints: - :vartype endpoints: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoint] - """ - - _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'endpoints': {'key': 'endpoints', 'type': '[FQDNEndpoint]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: - :paramtype category: str - :keyword endpoints: - :paramtype endpoints: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoint] - """ - super(FQDNEndpoints, self).__init__(**kwargs) - self.category = kwargs.get('category', None) - self.endpoints = kwargs.get('endpoints', None) - - -class FQDNEndpointsPropertyBag(msrest.serialization.Model): - """Property bag for FQDN endpoints result. - - :ivar properties: - :vartype properties: ~azure.mgmt.machinelearningservices.models.FQDNEndpoints - """ - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'FQDNEndpoints'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: - :paramtype properties: ~azure.mgmt.machinelearningservices.models.FQDNEndpoints - """ - super(FQDNEndpointsPropertyBag, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class OutboundRule(msrest.serialization.Model): - """Outbound Rule for the managed network of a machine learning workspace. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FqdnOutboundRule, PrivateEndpointOutboundRule, ServiceTagOutboundRule. 
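The FQDN endpoint classes above describe a result shape (the docstring calls FQDNEndpointsPropertyBag a "property bag for FQDN endpoints result"), so in practice they are read from a response rather than built by hand. The sketch below only illustrates how the types nest; the import path is assumed as before and the values are placeholders.

from azure.mgmt.machinelearningservices import models as ml_models  # assumed import path

endpoint_bag = ml_models.FQDNEndpointsPropertyBag(
    properties=ml_models.FQDNEndpoints(
        category="<dependency-category>",       # free-form category string
        endpoints=[
            ml_models.FQDNEndpoint(
                domain_name="contoso.example.com",
                endpoint_details=[ml_models.FQDNEndpointDetail(port=443)],
            )
        ],
    )
)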
- - All required parameters must be populated in order to send to Azure. - - :ivar category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :ivar status: Type of a managed network Outbound Rule of a machine learning workspace. Possible - values include: "Inactive", "Active". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus - :ivar type: Required. Type of a managed network Outbound Rule of a machine learning - workspace.Constant filled by server. Possible values include: "FQDN", "PrivateEndpoint", - "ServiceTag". - :vartype type: str or ~azure.mgmt.machinelearningservices.models.RuleType - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - } - - _subtype_map = { - 'type': {'FQDN': 'FqdnOutboundRule', 'PrivateEndpoint': 'PrivateEndpointOutboundRule', 'ServiceTag': 'ServiceTagOutboundRule'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :keyword status: Type of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Inactive", "Active". - :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus - """ - super(OutboundRule, self).__init__(**kwargs) - self.category = kwargs.get('category', None) - self.status = kwargs.get('status', None) - self.type = None # type: Optional[str] - - -class FqdnOutboundRule(OutboundRule): - """FQDN Outbound Rule for the managed network of a machine learning workspace. - - All required parameters must be populated in order to send to Azure. - - :ivar category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :ivar status: Type of a managed network Outbound Rule of a machine learning workspace. Possible - values include: "Inactive", "Active". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus - :ivar type: Required. Type of a managed network Outbound Rule of a machine learning - workspace.Constant filled by server. Possible values include: "FQDN", "PrivateEndpoint", - "ServiceTag". - :vartype type: str or ~azure.mgmt.machinelearningservices.models.RuleType - :ivar destination: - :vartype destination: str - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'destination': {'key': 'destination', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :keyword status: Type of a managed network Outbound Rule of a machine learning workspace. 
- Possible values include: "Inactive", "Active". - :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus - :keyword destination: - :paramtype destination: str - """ - super(FqdnOutboundRule, self).__init__(**kwargs) - self.type = 'FQDN' # type: str - self.destination = kwargs.get('destination', None) - - -class GenerationSafetyQualityMetricThreshold(msrest.serialization.Model): - """Generation safety quality metric threshold definition. - - All required parameters must be populated in order to send to Azure. - - :ivar metric: Required. [Required] Gets or sets the feature attribution metric to calculate. - Possible values include: "AcceptableGroundednessScorePerInstance", - "AggregatedGroundednessPassRate", "AcceptableCoherenceScorePerInstance", - "AggregatedCoherencePassRate", "AcceptableFluencyScorePerInstance", - "AggregatedFluencyPassRate", "AcceptableSimilarityScorePerInstance", - "AggregatedSimilarityPassRate", "AcceptableRelevanceScorePerInstance", - "AggregatedRelevancePassRate". - :vartype metric: str or - ~azure.mgmt.machinelearningservices.models.GenerationSafetyQualityMetric - :ivar threshold: Gets or sets the threshold value. - If null, a default value will be set depending on the selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - - _validation = { - 'metric': {'required': True}, - } - - _attribute_map = { - 'metric': {'key': 'metric', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword metric: Required. [Required] Gets or sets the feature attribution metric to calculate. - Possible values include: "AcceptableGroundednessScorePerInstance", - "AggregatedGroundednessPassRate", "AcceptableCoherenceScorePerInstance", - "AggregatedCoherencePassRate", "AcceptableFluencyScorePerInstance", - "AggregatedFluencyPassRate", "AcceptableSimilarityScorePerInstance", - "AggregatedSimilarityPassRate", "AcceptableRelevanceScorePerInstance", - "AggregatedRelevancePassRate". - :paramtype metric: str or - ~azure.mgmt.machinelearningservices.models.GenerationSafetyQualityMetric - :keyword threshold: Gets or sets the threshold value. - If null, a default value will be set depending on the selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - super(GenerationSafetyQualityMetricThreshold, self).__init__(**kwargs) - self.metric = kwargs['metric'] - self.threshold = kwargs.get('threshold', None) - - -class GenerationSafetyQualityMonitoringSignal(MonitoringSignalBase): - """Generation safety quality monitoring signal definition. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", - "Enabled". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". - :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType - :ivar metric_thresholds: Required. 
[Required] Gets or sets the metrics to calculate and the - corresponding thresholds. - :vartype metric_thresholds: - list[~azure.mgmt.machinelearningservices.models.GenerationSafetyQualityMetricThreshold] - :ivar production_data: Gets or sets the target data for computing metrics. - :vartype production_data: - list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] - :ivar sampling_rate: Required. [Required] The sample rate of the target data, should be greater - than 0 and at most 1. - :vartype sampling_rate: float - :ivar workspace_connection_id: Gets or sets the workspace connection ID used to connect to the - content generation endpoint. - :vartype workspace_connection_id: str - """ - - _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'sampling_rate': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[GenerationSafetyQualityMetricThreshold]'}, - 'production_data': {'key': 'productionData', 'type': '[MonitoringInputDataBase]'}, - 'sampling_rate': {'key': 'samplingRate', 'type': 'float'}, - 'workspace_connection_id': {'key': 'workspaceConnectionId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword metric_thresholds: Required. [Required] Gets or sets the metrics to calculate and the - corresponding thresholds. - :paramtype metric_thresholds: - list[~azure.mgmt.machinelearningservices.models.GenerationSafetyQualityMetricThreshold] - :keyword production_data: Gets or sets the target data for computing metrics. - :paramtype production_data: - list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] - :keyword sampling_rate: Required. [Required] The sample rate of the target data, should be - greater than 0 and at most 1. - :paramtype sampling_rate: float - :keyword workspace_connection_id: Gets or sets the workspace connection ID used to connect to - the content generation endpoint. - :paramtype workspace_connection_id: str - """ - super(GenerationSafetyQualityMonitoringSignal, self).__init__(**kwargs) - self.signal_type = 'GenerationSafetyQuality' # type: str - self.metric_thresholds = kwargs['metric_thresholds'] - self.production_data = kwargs.get('production_data', None) - self.sampling_rate = kwargs['sampling_rate'] - self.workspace_connection_id = kwargs.get('workspace_connection_id', None) - - -class GenerationTokenStatisticsMetricThreshold(msrest.serialization.Model): - """Generation token statistics metric threshold definition. - - All required parameters must be populated in order to send to Azure. - - :ivar metric: Required. [Required] Gets or sets the feature attribution metric to calculate. - Possible values include: "TotalTokenCount", "TotalTokenCountPerGroup". - :vartype metric: str or - ~azure.mgmt.machinelearningservices.models.GenerationTokenStatisticsMetric - :ivar threshold: Gets or sets the threshold value. - If null, a default value will be set depending on the selected metric. 
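A hedged usage sketch for the generation safety/quality monitoring types above. The threshold is omitted so the service default applies, as the docstring describes; the workspace connection ID is a placeholder and the import path is assumed.

from azure.mgmt.machinelearningservices import models as ml_models  # assumed import path

groundedness = ml_models.GenerationSafetyQualityMetricThreshold(
    metric="AggregatedGroundednessPassRate",    # threshold omitted -> default per the docstring
)
signal = ml_models.GenerationSafetyQualityMonitoringSignal(
    metric_thresholds=[groundedness],           # required
    sampling_rate=0.1,                          # required; must be > 0 and at most 1
    workspace_connection_id="<workspace-connection-id>",
)
assert signal.signal_type == "GenerationSafetyQuality"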
- :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - - _validation = { - 'metric': {'required': True}, - } - - _attribute_map = { - 'metric': {'key': 'metric', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword metric: Required. [Required] Gets or sets the feature attribution metric to calculate. - Possible values include: "TotalTokenCount", "TotalTokenCountPerGroup". - :paramtype metric: str or - ~azure.mgmt.machinelearningservices.models.GenerationTokenStatisticsMetric - :keyword threshold: Gets or sets the threshold value. - If null, a default value will be set depending on the selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - """ - super(GenerationTokenStatisticsMetricThreshold, self).__init__(**kwargs) - self.metric = kwargs['metric'] - self.threshold = kwargs.get('threshold', None) - - -class GenerationTokenStatisticsSignal(MonitoringSignalBase): - """Generation token statistics signal definition. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", - "Enabled". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". - :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType - :ivar metric_thresholds: Required. [Required] Gets or sets the metrics to calculate and the - corresponding thresholds. - :vartype metric_thresholds: - list[~azure.mgmt.machinelearningservices.models.GenerationTokenStatisticsMetricThreshold] - :ivar production_data: Gets or sets the target data for computing metrics. - :vartype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :ivar sampling_rate: Required. [Required] The sample rate of the target data, should be greater - than 0 and at most 1. - :vartype sampling_rate: float - """ - - _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'sampling_rate': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[GenerationTokenStatisticsMetricThreshold]'}, - 'production_data': {'key': 'productionData', 'type': 'MonitoringInputDataBase'}, - 'sampling_rate': {'key': 'samplingRate', 'type': 'float'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword metric_thresholds: Required. 
[Required] Gets or sets the metrics to calculate and the - corresponding thresholds. - :paramtype metric_thresholds: - list[~azure.mgmt.machinelearningservices.models.GenerationTokenStatisticsMetricThreshold] - :keyword production_data: Gets or sets the target data for computing metrics. - :paramtype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :keyword sampling_rate: Required. [Required] The sample rate of the target data, should be - greater than 0 and at most 1. - :paramtype sampling_rate: float - """ - super(GenerationTokenStatisticsSignal, self).__init__(**kwargs) - self.signal_type = 'GenerationTokenStatistics' # type: str - self.metric_thresholds = kwargs['metric_thresholds'] - self.production_data = kwargs.get('production_data', None) - self.sampling_rate = kwargs['sampling_rate'] - - -class GridSamplingAlgorithm(SamplingAlgorithm): - """Defines a Sampling Algorithm that exhaustively generates every value combination in the space. - - All required parameters must be populated in order to send to Azure. - - :ivar sampling_algorithm_type: Required. [Required] The algorithm used for generating - hyperparameter values, along with configuration properties.Constant filled by server. Possible - values include: "Grid", "Random", "Bayesian". - :vartype sampling_algorithm_type: str or - ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType - """ - - _validation = { - 'sampling_algorithm_type': {'required': True}, - } - - _attribute_map = { - 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(GridSamplingAlgorithm, self).__init__(**kwargs) - self.sampling_algorithm_type = 'Grid' # type: str - - -class HdfsDatastore(DatastoreProperties): - """HdfsDatastore. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar credentials: Required. [Required] Account credentials. - :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled - by server. Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", - "AzureFile", "Hdfs", "OneLake". - :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType - :ivar intellectual_property: Intellectual Property details. - :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar is_default: Readonly property to indicate if datastore is the workspace default - datastore. - :vartype is_default: bool - :ivar hdfs_server_certificate: The TLS cert of the HDFS server. Needs to be a base64 encoded - string. Required if "Https" protocol is selected. - :vartype hdfs_server_certificate: str - :ivar name_node_address: Required. [Required] IP Address or DNS HostName. - :vartype name_node_address: str - :ivar protocol: Protocol used to communicate with the storage account (Https/Http). 
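The same pattern applies to the token-statistics signal above, and GridSamplingAlgorithm shows the degenerate case of a discriminated type with no inputs of its own. A sketch under the same assumed import path:

from azure.mgmt.machinelearningservices import models as ml_models  # assumed import path

token_signal = ml_models.GenerationTokenStatisticsSignal(
    metric_thresholds=[
        ml_models.GenerationTokenStatisticsMetricThreshold(metric="TotalTokenCount"),
    ],
    sampling_rate=1.0,                              # sample all of the target data
)
assert token_signal.signal_type == "GenerationTokenStatistics"

grid = ml_models.GridSamplingAlgorithm()            # no caller-supplied fields
assert grid.sampling_algorithm_type == "Grid"       # constant filled by the class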
- :vartype protocol: str - """ - - _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'name_node_address': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'hdfs_server_certificate': {'key': 'hdfsServerCertificate', 'type': 'str'}, - 'name_node_address': {'key': 'nameNodeAddress', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword credentials: Required. [Required] Account credentials. - :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :keyword intellectual_property: Intellectual Property details. - :paramtype intellectual_property: - ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword hdfs_server_certificate: The TLS cert of the HDFS server. Needs to be a base64 encoded - string. Required if "Https" protocol is selected. - :paramtype hdfs_server_certificate: str - :keyword name_node_address: Required. [Required] IP Address or DNS HostName. - :paramtype name_node_address: str - :keyword protocol: Protocol used to communicate with the storage account (Https/Http). - :paramtype protocol: str - """ - super(HdfsDatastore, self).__init__(**kwargs) - self.datastore_type = 'Hdfs' # type: str - self.hdfs_server_certificate = kwargs.get('hdfs_server_certificate', None) - self.name_node_address = kwargs['name_node_address'] - self.protocol = kwargs.get('protocol', "http") - - -class HDInsightSchema(msrest.serialization.Model): - """HDInsightSchema. - - :ivar properties: HDInsight compute properties. - :vartype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties - """ - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'HDInsightProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: HDInsight compute properties. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties - """ - super(HDInsightSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class HDInsight(Compute, HDInsightSchema): - """A HDInsight compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar properties: HDInsight compute properties. - :vartype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". 
- :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar compute_location: Location for the underlying compute. - :vartype compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar description: The description of the Machine Learning compute. - :vartype description: str - :ivar created_on: The time at which the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The time at which the compute was last modified. - :vartype modified_on: ~datetime.datetime - :ivar resource_id: ARM resource id of the underlying compute. - :vartype resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI - and AAD exclusively for authentication. - :vartype disable_local_auth: bool - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'HDInsightProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: HDInsight compute properties. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties - :keyword compute_location: Location for the underlying compute. - :paramtype compute_location: str - :keyword description: The description of the Machine Learning compute. - :paramtype description: str - :keyword resource_id: ARM resource id of the underlying compute. - :paramtype resource_id: str - :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only - MSI and AAD exclusively for authentication. 
- :paramtype disable_local_auth: bool - """ - super(HDInsight, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'HDInsight' # type: str - self.compute_location = kwargs.get('compute_location', None) - self.provisioning_state = None - self.description = kwargs.get('description', None) - self.created_on = None - self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) - self.provisioning_errors = None - self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) - - -class HDInsightProperties(msrest.serialization.Model): - """HDInsight compute properties. - - :ivar ssh_port: Port open for ssh connections on the master node of the cluster. - :vartype ssh_port: int - :ivar address: Public IP address of the master node of the cluster. - :vartype address: str - :ivar administrator_account: Admin credentials for master node of the cluster. - :vartype administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - """ - - _attribute_map = { - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'address': {'key': 'address', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword ssh_port: Port open for ssh connections on the master node of the cluster. - :paramtype ssh_port: int - :keyword address: Public IP address of the master node of the cluster. - :paramtype address: str - :keyword administrator_account: Admin credentials for master node of the cluster. - :paramtype administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - """ - super(HDInsightProperties, self).__init__(**kwargs) - self.ssh_port = kwargs.get('ssh_port', None) - self.address = kwargs.get('address', None) - self.administrator_account = kwargs.get('administrator_account', None) - - -class IdAssetReference(AssetReferenceBase): - """Reference to an asset via its ARM resource ID. - - All required parameters must be populated in order to send to Azure. - - :ivar reference_type: Required. [Required] Specifies the type of asset reference.Constant - filled by server. Possible values include: "Id", "DataPath", "OutputPath". - :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType - :ivar asset_id: Required. [Required] ARM resource ID of the asset. - :vartype asset_id: str - """ - - _validation = { - 'reference_type': {'required': True}, - 'asset_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'reference_type': {'key': 'referenceType', 'type': 'str'}, - 'asset_id': {'key': 'assetId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword asset_id: Required. [Required] ARM resource ID of the asset. - :paramtype asset_id: str - """ - super(IdAssetReference, self).__init__(**kwargs) - self.reference_type = 'Id' # type: str - self.asset_id = kwargs['asset_id'] - - -class IdentityForCmk(msrest.serialization.Model): - """Identity object used for encryption. - - :ivar user_assigned_identity: UserAssignedIdentity to be used to fetch the encryption key from - keyVault. 
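A brief sketch for the HDInsight compute wrapper and the IdAssetReference class above. Read-only fields (provisioning_state, created_on, and so on) are left alone since the server populates them; administrator_account is omitted because it takes a VirtualMachineSshCredentials model defined elsewhere in this module. Values and the import path are placeholders/assumptions.

from azure.mgmt.machinelearningservices import models as ml_models  # assumed import path

hd_props = ml_models.HDInsightProperties(
    ssh_port=22,
    address="10.0.0.4",                              # master node public IP (placeholder)
)
hd_compute = ml_models.HDInsight(
    properties=hd_props,
    resource_id="<arm-id-of-the-hdinsight-cluster>",
    description="Attached HDInsight cluster",
)
assert hd_compute.compute_type == "HDInsight"        # discriminator constant

model_ref = ml_models.IdAssetReference(asset_id="<arm-id-of-the-asset>")  # asset_id is required
assert model_ref.reference_type == "Id"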
- :vartype user_assigned_identity: str - """ - - _attribute_map = { - 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword user_assigned_identity: UserAssignedIdentity to be used to fetch the encryption key - from keyVault. - :paramtype user_assigned_identity: str - """ - super(IdentityForCmk, self).__init__(**kwargs) - self.user_assigned_identity = kwargs.get('user_assigned_identity', None) - - -class IdleShutdownSetting(msrest.serialization.Model): - """Stops compute instance after user defined period of inactivity. - - :ivar idle_time_before_shutdown: Time is defined in ISO8601 format. Minimum is 15 min, maximum - is 3 days. - :vartype idle_time_before_shutdown: str - """ - - _attribute_map = { - 'idle_time_before_shutdown': {'key': 'idleTimeBeforeShutdown', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword idle_time_before_shutdown: Time is defined in ISO8601 format. Minimum is 15 min, - maximum is 3 days. - :paramtype idle_time_before_shutdown: str - """ - super(IdleShutdownSetting, self).__init__(**kwargs) - self.idle_time_before_shutdown = kwargs.get('idle_time_before_shutdown', None) - - -class Image(msrest.serialization.Model): - """Image. - - :ivar additional_properties: Unmatched properties from the message are deserialized to this - collection. - :vartype additional_properties: dict[str, any] - :ivar type: Type of the image. Possible values are: docker - For docker images. azureml - For - AzureML images. Possible values include: "docker", "azureml". Default value: "docker". - :vartype type: str or ~azure.mgmt.machinelearningservices.models.ImageType - :ivar reference: Image reference URL. - :vartype reference: str - """ - - _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'reference': {'key': 'reference', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword additional_properties: Unmatched properties from the message are deserialized to this - collection. - :paramtype additional_properties: dict[str, any] - :keyword type: Type of the image. Possible values are: docker - For docker images. azureml - - For AzureML images. Possible values include: "docker", "azureml". Default value: "docker". - :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ImageType - :keyword reference: Image reference URL. - :paramtype reference: str - """ - super(Image, self).__init__(**kwargs) - self.additional_properties = kwargs.get('additional_properties', None) - self.type = kwargs.get('type', "docker") - self.reference = kwargs.get('reference', None) - - -class ImageVertical(msrest.serialization.Model): - """Abstract class for AutoML tasks that train image (computer vision) models - -such as Image Classification / Image Classification Multilabel / Image Object Detection / Image Instance Segmentation. - - All required parameters must be populated in order to send to Azure. - - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. 
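Two of the simpler models above in one sketch, using only fields and value ranges stated in their docstrings; the import path remains an assumption.

from azure.mgmt.machinelearningservices import models as ml_models  # assumed import path

# ISO 8601 duration; the docstring allows a minimum of 15 minutes and a maximum of 3 days.
idle = ml_models.IdleShutdownSetting(idle_time_before_shutdown="PT30M")

# type defaults to "docker" when omitted; "azureml" selects an AzureML image.
img = ml_models.Image(type="azureml", reference="<image-reference-url>")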
- :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - """ - - _validation = { - 'limit_settings': {'required': True}, - } - - _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - """ - super(ImageVertical, self).__init__(**kwargs) - self.limit_settings = kwargs['limit_settings'] - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - - -class ImageClassificationBase(ImageVertical): - """ImageClassificationBase. - - All required parameters must be populated in order to send to Azure. - - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. 
- :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - """ - - _validation = { - 'limit_settings': {'required': True}, - } - - _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsClassification'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsClassification]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword model_settings: Settings used for training the model. - :paramtype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - """ - super(ImageClassificationBase, self).__init__(**kwargs) - self.model_settings = kwargs.get('model_settings', None) - self.search_space = kwargs.get('search_space', None) - - -class ImageClassification(AutoMLVertical, ImageClassificationBase): - """Image Classification. Multi-class image classification is used when an image is classified with only a single label -from a set of classes - e.g. each image is classified as either an image of a 'cat' or a 'dog' or a 'duck'. - - All required parameters must be populated in order to send to Azure. - - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. 
- :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric to optimize for this task. Possible values include: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics - """ - - _validation = { - 'limit_settings': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, - } - - _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsClassification'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsClassification]'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword model_settings: Settings used for training the model. - :paramtype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. 
- :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword primary_metric: Primary metric to optimize for this task. Possible values include: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted". - :paramtype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics - """ - super(ImageClassification, self).__init__(**kwargs) - self.limit_settings = kwargs['limit_settings'] - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.model_settings = kwargs.get('model_settings', None) - self.search_space = kwargs.get('search_space', None) - self.task_type = 'ImageClassification' # type: str - self.primary_metric = kwargs.get('primary_metric', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] - - -class ImageClassificationMultilabel(AutoMLVertical, ImageClassificationBase): - """Image Classification Multilabel. Multi-label image classification is used when an image could have one or more labels -from a set of labels - e.g. an image could be labeled with both 'cat' and 'dog'. - - All required parameters must be populated in order to send to Azure. - - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. 
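A rough sketch of the ImageClassification task above. ImageLimitSettings and MLTableJobInput are referenced but not defined in this hunk, so constructing ImageLimitSettings with defaults and MLTableJobInput with a uri keyword are assumptions; the import path is assumed as before.

from azure.mgmt.machinelearningservices import models as ml_models  # assumed import path

img_task = ml_models.ImageClassification(
    limit_settings=ml_models.ImageLimitSettings(),   # required; defaults assumed acceptable
    training_data=ml_models.MLTableJobInput(uri="azureml://datastores/<datastore>/paths/<images>/"),
    target_column_name="label",
    primary_metric="Accuracy",                       # one of the ClassificationPrimaryMetrics values
)
assert img_task.task_type == "ImageClassification"   # constant filled by the class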
- Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric to optimize for this task. Possible values include: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted", "IOU". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics - """ - - _validation = { - 'limit_settings': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, - } - - _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsClassification'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsClassification]'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword model_settings: Settings used for training the model. - :paramtype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsClassification - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. 
- :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword primary_metric: Primary metric to optimize for this task. Possible values include: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted", "IOU". - :paramtype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics - """ - super(ImageClassificationMultilabel, self).__init__(**kwargs) - self.limit_settings = kwargs['limit_settings'] - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.model_settings = kwargs.get('model_settings', None) - self.search_space = kwargs.get('search_space', None) - self.task_type = 'ImageClassificationMultilabel' # type: str - self.primary_metric = kwargs.get('primary_metric', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] - - -class ImageObjectDetectionBase(ImageVertical): - """ImageObjectDetectionBase. - - All required parameters must be populated in order to send to Azure. - - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - """ - - _validation = { - 'limit_settings': {'required': True}, - } - - _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsObjectDetection'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsObjectDetection]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :keyword validation_data: Validation data inputs. 
- :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword model_settings: Settings used for training the model. - :paramtype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - """ - super(ImageObjectDetectionBase, self).__init__(**kwargs) - self.model_settings = kwargs.get('model_settings', None) - self.search_space = kwargs.get('search_space', None) - - -class ImageInstanceSegmentation(AutoMLVertical, ImageObjectDetectionBase): - """Image Instance Segmentation. Instance segmentation is used to identify objects in an image at the pixel level, -drawing a polygon around each object in the image. - - All required parameters must be populated in order to send to Azure. - - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric to optimize for this task. Possible values include: - "MeanAveragePrecision". 
- :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics - """ - - _validation = { - 'limit_settings': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, - } - - _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsObjectDetection'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsObjectDetection]'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword model_settings: Settings used for training the model. - :paramtype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword primary_metric: Primary metric to optimize for this task. Possible values include: - "MeanAveragePrecision". 
- :paramtype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics - """ - super(ImageInstanceSegmentation, self).__init__(**kwargs) - self.limit_settings = kwargs['limit_settings'] - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.model_settings = kwargs.get('model_settings', None) - self.search_space = kwargs.get('search_space', None) - self.task_type = 'ImageInstanceSegmentation' # type: str - self.primary_metric = kwargs.get('primary_metric', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] - - -class ImageLimitSettings(msrest.serialization.Model): - """Limit settings for the AutoML job. - - :ivar max_concurrent_trials: Maximum number of concurrent AutoML iterations. - :vartype max_concurrent_trials: int - :ivar max_trials: Maximum number of AutoML iterations. - :vartype max_trials: int - :ivar timeout: AutoML job timeout. - :vartype timeout: ~datetime.timedelta - """ - - _attribute_map = { - 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, - 'max_trials': {'key': 'maxTrials', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword max_concurrent_trials: Maximum number of concurrent AutoML iterations. - :paramtype max_concurrent_trials: int - :keyword max_trials: Maximum number of AutoML iterations. - :paramtype max_trials: int - :keyword timeout: AutoML job timeout. - :paramtype timeout: ~datetime.timedelta - """ - super(ImageLimitSettings, self).__init__(**kwargs) - self.max_concurrent_trials = kwargs.get('max_concurrent_trials', 1) - self.max_trials = kwargs.get('max_trials', 1) - self.timeout = kwargs.get('timeout', "P7D") - - -class ImageMetadata(msrest.serialization.Model): - """Returns metadata about the operating system image for this compute instance. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar current_image_version: Specifies the current operating system image version this compute - instance is running on. - :vartype current_image_version: str - :ivar latest_image_version: Specifies the latest available operating system image version. - :vartype latest_image_version: str - :ivar is_latest_os_image_version: Specifies whether this compute instance is running on the - latest operating system image. - :vartype is_latest_os_image_version: bool - :ivar os_patching_status: Metadata about the os patching. - :vartype os_patching_status: ~azure.mgmt.machinelearningservices.models.OsPatchingStatus - """ - - _validation = { - 'os_patching_status': {'readonly': True}, - } - - _attribute_map = { - 'current_image_version': {'key': 'currentImageVersion', 'type': 'str'}, - 'latest_image_version': {'key': 'latestImageVersion', 'type': 'str'}, - 'is_latest_os_image_version': {'key': 'isLatestOsImageVersion', 'type': 'bool'}, - 'os_patching_status': {'key': 'osPatchingStatus', 'type': 'OsPatchingStatus'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword current_image_version: Specifies the current operating system image version this - compute instance is running on. - :paramtype current_image_version: str - :keyword latest_image_version: Specifies the latest available operating system image version. 
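`ImageLimitSettings` above defaults to one trial, one concurrent trial, and a `"P7D"` timeout (an ISO 8601 duration, i.e. seven days). As a rough sketch of how such a model maps snake_case attributes to the camelCase REST keys in its `_attribute_map`, assuming `msrest` is installed (the deleted models derive from `msrest.serialization.Model`); the class here is a hypothetical stand-in, not the SDK type:

```
from msrest.serialization import Model


class LimitSettingsSketch(Model):
    """Hypothetical stand-in mirroring the ImageLimitSettings mapping above."""

    _attribute_map = {
        "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"},
        "max_trials": {"key": "maxTrials", "type": "int"},
        "timeout": {"key": "timeout", "type": "duration"},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.max_concurrent_trials = kwargs.get("max_concurrent_trials", 1)
        self.max_trials = kwargs.get("max_trials", 1)
        self.timeout = kwargs.get("timeout", "P7D")


# Expected to produce roughly {'maxConcurrentTrials': 1, 'maxTrials': 5, 'timeout': 'P7D'}.
print(LimitSettingsSketch(max_trials=5).serialize())
```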
- :paramtype latest_image_version: str - :keyword is_latest_os_image_version: Specifies whether this compute instance is running on the - latest operating system image. - :paramtype is_latest_os_image_version: bool - """ - super(ImageMetadata, self).__init__(**kwargs) - self.current_image_version = kwargs.get('current_image_version', None) - self.latest_image_version = kwargs.get('latest_image_version', None) - self.is_latest_os_image_version = kwargs.get('is_latest_os_image_version', None) - self.os_patching_status = None - - -class ImageModelDistributionSettings(msrest.serialization.Model): - """Distribution expressions to sweep over values of model settings. - -:code:` -Some examples are: -``` -ModelName = "choice('seresnext', 'resnest50')"; -LearningRate = "uniform(0.001, 0.01)"; -LayersToFreeze = "choice(0, 2)"; -```` -All distributions can be specified as distribution_name(min, max) or choice(val1, val2, ..., valn) -where distribution name can be: uniform, quniform, loguniform, etc -For more details on how to compose distribution expressions please check the documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: str - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: str - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: str - :ivar distributed: Whether to use distributer training. - :vartype distributed: str - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: str - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: str - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: str - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: str - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. - :vartype evaluation_frequency: str - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: str - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: str - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. 
- :vartype learning_rate: str - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. - :vartype learning_rate_scheduler: str - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: str - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: str - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: str - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: str - :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :vartype optimizer: str - :ivar random_seed: Random seed to be used when using deterministic training. - :vartype random_seed: str - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: str - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: str - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: str - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: str - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :vartype warmup_cosine_lr_cycles: str - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: str - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. 
- :vartype weight_decay: str - """ - - _attribute_map = { - 'ams_gradient': {'key': 'amsGradient', 'type': 'str'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'str'}, - 'beta2': {'key': 'beta2', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'str'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'str'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'str'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'str'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'str'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'str'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'str'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'str'}, - 'nesterov': {'key': 'nesterov', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'str'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'str'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'str'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'str'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'str'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'str'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :paramtype ams_gradient: str - :keyword augmentations: Settings for using Augmentations. - :paramtype augmentations: str - :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta1: str - :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta2: str - :keyword distributed: Whether to use distributer training. - :paramtype distributed: str - :keyword early_stopping: Enable early stopping logic during training. - :paramtype early_stopping: str - :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait - before primary metric improvement - is tracked for early stopping. Must be a positive integer. - :paramtype early_stopping_delay: str - :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :paramtype early_stopping_patience: str - :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. - :paramtype enable_onnx_normalization: str - :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. - Must be a positive integer. 
- :paramtype evaluation_frequency: str - :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :paramtype gradient_accumulation_step: str - :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive - integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype layers_to_freeze: str - :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :paramtype learning_rate: str - :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. - :paramtype learning_rate_scheduler: str - :keyword model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype model_name: str - :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, - 1]. - :paramtype momentum: str - :keyword nesterov: Enable nesterov when optimizer is 'sgd'. - :paramtype nesterov: str - :keyword number_of_epochs: Number of training epochs. Must be a positive integer. - :paramtype number_of_epochs: str - :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. - :paramtype number_of_workers: str - :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :paramtype optimizer: str - :keyword random_seed: Random seed to be used when using deterministic training. - :paramtype random_seed: str - :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float - in the range [0, 1]. - :paramtype step_lr_gamma: str - :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be - a positive integer. - :paramtype step_lr_step_size: str - :keyword training_batch_size: Training batch size. Must be a positive integer. - :paramtype training_batch_size: str - :keyword validation_batch_size: Validation batch size. Must be a positive integer. - :paramtype validation_batch_size: str - :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :paramtype warmup_cosine_lr_cycles: str - :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :paramtype warmup_cosine_lr_warmup_epochs: str - :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must - be a float in the range[0, 1]. 
- :paramtype weight_decay: str - """ - super(ImageModelDistributionSettings, self).__init__(**kwargs) - self.ams_gradient = kwargs.get('ams_gradient', None) - self.augmentations = kwargs.get('augmentations', None) - self.beta1 = kwargs.get('beta1', None) - self.beta2 = kwargs.get('beta2', None) - self.distributed = kwargs.get('distributed', None) - self.early_stopping = kwargs.get('early_stopping', None) - self.early_stopping_delay = kwargs.get('early_stopping_delay', None) - self.early_stopping_patience = kwargs.get('early_stopping_patience', None) - self.enable_onnx_normalization = kwargs.get('enable_onnx_normalization', None) - self.evaluation_frequency = kwargs.get('evaluation_frequency', None) - self.gradient_accumulation_step = kwargs.get('gradient_accumulation_step', None) - self.layers_to_freeze = kwargs.get('layers_to_freeze', None) - self.learning_rate = kwargs.get('learning_rate', None) - self.learning_rate_scheduler = kwargs.get('learning_rate_scheduler', None) - self.model_name = kwargs.get('model_name', None) - self.momentum = kwargs.get('momentum', None) - self.nesterov = kwargs.get('nesterov', None) - self.number_of_epochs = kwargs.get('number_of_epochs', None) - self.number_of_workers = kwargs.get('number_of_workers', None) - self.optimizer = kwargs.get('optimizer', None) - self.random_seed = kwargs.get('random_seed', None) - self.step_lr_gamma = kwargs.get('step_lr_gamma', None) - self.step_lr_step_size = kwargs.get('step_lr_step_size', None) - self.training_batch_size = kwargs.get('training_batch_size', None) - self.validation_batch_size = kwargs.get('validation_batch_size', None) - self.warmup_cosine_lr_cycles = kwargs.get('warmup_cosine_lr_cycles', None) - self.warmup_cosine_lr_warmup_epochs = kwargs.get('warmup_cosine_lr_warmup_epochs', None) - self.weight_decay = kwargs.get('weight_decay', None) - - -class ImageModelDistributionSettingsClassification(ImageModelDistributionSettings): - """Distribution expressions to sweep over values of model settings. - -:code:` -Some examples are: -``` -ModelName = "choice('seresnext', 'resnest50')"; -LearningRate = "uniform(0.001, 0.01)"; -LayersToFreeze = "choice(0, 2)"; -```` -For more details on how to compose distribution expressions please check the documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: str - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: str - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: str - :ivar distributed: Whether to use distributer training. - :vartype distributed: str - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: str - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: str - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. 
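Every field on `ImageModelDistributionSettings` above is a plain string carrying a sweep expression (`choice(...)`, `uniform(...)`, and so on) rather than a concrete value, as the docstring's examples show. A small illustrative mapping in that shape (hypothetical values):

```
# Hypothetical sweep expressions in the string form the docstring describes.
distribution_expressions = {
    "model_name": "choice('seresnext', 'resnest50')",
    "learning_rate": "uniform(0.001, 0.01)",
    "layers_to_freeze": "choice(0, 2)",
    "optimizer": "choice('sgd', 'adam', 'adamw')",
}

for field, expression in distribution_expressions.items():
    print(f"{field} -> {expression}")
```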
Must be a positive integer. - :vartype early_stopping_patience: str - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: str - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. - :vartype evaluation_frequency: str - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: str - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: str - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: str - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. - :vartype learning_rate_scheduler: str - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: str - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: str - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: str - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: str - :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :vartype optimizer: str - :ivar random_seed: Random seed to be used when using deterministic training. - :vartype random_seed: str - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: str - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: str - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: str - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: str - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :vartype warmup_cosine_lr_cycles: str - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: str - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. - :vartype weight_decay: str - :ivar training_crop_size: Image crop size that is input to the neural network for the training - dataset. Must be a positive integer. 
- :vartype training_crop_size: str - :ivar validation_crop_size: Image crop size that is input to the neural network for the - validation dataset. Must be a positive integer. - :vartype validation_crop_size: str - :ivar validation_resize_size: Image size to which to resize before cropping for validation - dataset. Must be a positive integer. - :vartype validation_resize_size: str - :ivar weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. - 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be - 0 or 1 or 2. - :vartype weighted_loss: str - """ - - _attribute_map = { - 'ams_gradient': {'key': 'amsGradient', 'type': 'str'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'str'}, - 'beta2': {'key': 'beta2', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'str'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'str'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'str'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'str'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'str'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'str'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'str'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'str'}, - 'nesterov': {'key': 'nesterov', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'str'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'str'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'str'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'str'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'str'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'str'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'str'}, - 'training_crop_size': {'key': 'trainingCropSize', 'type': 'str'}, - 'validation_crop_size': {'key': 'validationCropSize', 'type': 'str'}, - 'validation_resize_size': {'key': 'validationResizeSize', 'type': 'str'}, - 'weighted_loss': {'key': 'weightedLoss', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :paramtype ams_gradient: str - :keyword augmentations: Settings for using Augmentations. - :paramtype augmentations: str - :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta1: str - :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta2: str - :keyword distributed: Whether to use distributer training. - :paramtype distributed: str - :keyword early_stopping: Enable early stopping logic during training. 
- :paramtype early_stopping: str - :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait - before primary metric improvement - is tracked for early stopping. Must be a positive integer. - :paramtype early_stopping_delay: str - :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :paramtype early_stopping_patience: str - :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. - :paramtype enable_onnx_normalization: str - :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. - Must be a positive integer. - :paramtype evaluation_frequency: str - :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :paramtype gradient_accumulation_step: str - :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive - integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype layers_to_freeze: str - :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :paramtype learning_rate: str - :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. - :paramtype learning_rate_scheduler: str - :keyword model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype model_name: str - :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, - 1]. - :paramtype momentum: str - :keyword nesterov: Enable nesterov when optimizer is 'sgd'. - :paramtype nesterov: str - :keyword number_of_epochs: Number of training epochs. Must be a positive integer. - :paramtype number_of_epochs: str - :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. - :paramtype number_of_workers: str - :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :paramtype optimizer: str - :keyword random_seed: Random seed to be used when using deterministic training. - :paramtype random_seed: str - :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float - in the range [0, 1]. - :paramtype step_lr_gamma: str - :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be - a positive integer. - :paramtype step_lr_step_size: str - :keyword training_batch_size: Training batch size. Must be a positive integer. - :paramtype training_batch_size: str - :keyword validation_batch_size: Validation batch size. Must be a positive integer. - :paramtype validation_batch_size: str - :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. 
- :paramtype warmup_cosine_lr_cycles: str - :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :paramtype warmup_cosine_lr_warmup_epochs: str - :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must - be a float in the range[0, 1]. - :paramtype weight_decay: str - :keyword training_crop_size: Image crop size that is input to the neural network for the - training dataset. Must be a positive integer. - :paramtype training_crop_size: str - :keyword validation_crop_size: Image crop size that is input to the neural network for the - validation dataset. Must be a positive integer. - :paramtype validation_crop_size: str - :keyword validation_resize_size: Image size to which to resize before cropping for validation - dataset. Must be a positive integer. - :paramtype validation_resize_size: str - :keyword weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. - 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be - 0 or 1 or 2. - :paramtype weighted_loss: str - """ - super(ImageModelDistributionSettingsClassification, self).__init__(**kwargs) - self.training_crop_size = kwargs.get('training_crop_size', None) - self.validation_crop_size = kwargs.get('validation_crop_size', None) - self.validation_resize_size = kwargs.get('validation_resize_size', None) - self.weighted_loss = kwargs.get('weighted_loss', None) - - -class ImageModelDistributionSettingsObjectDetection(ImageModelDistributionSettings): - """Distribution expressions to sweep over values of model settings. - -:code:` -Some examples are: -``` -ModelName = "choice('seresnext', 'resnest50')"; -LearningRate = "uniform(0.001, 0.01)"; -LayersToFreeze = "choice(0, 2)"; -```` -For more details on how to compose distribution expressions please check the documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: str - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: str - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: str - :ivar distributed: Whether to use distributer training. - :vartype distributed: str - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: str - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: str - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: str - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: str - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. 
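`ImageModelDistributionSettingsClassification` above layers four classification-specific knobs (the crop/resize sizes and `weighted_loss`, which accepts 0, 1, or 2) on top of the common distribution settings. A hypothetical search-space entry in that shape:

```
# Hypothetical values; all fields remain sweep-expression strings.
classification_search_space = {
    "model_name": "choice('seresnext', 'resnest50')",
    "training_crop_size": "choice(224, 256)",
    "validation_crop_size": "choice(224, 256)",
    "validation_resize_size": "choice(256, 288)",
    "weighted_loss": "choice(0, 1, 2)",  # 0 = off, 1 = sqrt(class_weights), 2 = class_weights
}
```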
- :vartype evaluation_frequency: str - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: str - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: str - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: str - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. - :vartype learning_rate_scheduler: str - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: str - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: str - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: str - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: str - :ivar optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :vartype optimizer: str - :ivar random_seed: Random seed to be used when using deterministic training. - :vartype random_seed: str - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: str - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: str - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: str - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: str - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :vartype warmup_cosine_lr_cycles: str - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: str - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. - :vartype weight_decay: str - :ivar box_detections_per_image: Maximum number of detections per image, for all classes. Must - be a positive integer. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype box_detections_per_image: str - :ivar box_score_threshold: During inference, only return proposals with a classification score - greater than - BoxScoreThreshold. Must be a float in the range[0, 1]. - :vartype box_score_threshold: str - :ivar image_size: Image size for train and validation. Must be a positive integer. 
- Note: The training run may get into CUDA OOM if the size is too big.
- Note: This setting is only supported for the 'yolov5' algorithm.
- :vartype image_size: str
- :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone.
- Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big.
- Note: This setting is not supported for the 'yolov5' algorithm.
- :vartype max_size: str
- :ivar min_size: Minimum size of the image to be rescaled before feeding it to the backbone.
- Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big.
- Note: This setting is not supported for the 'yolov5' algorithm.
- :vartype min_size: str
- :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'.
- Note: training run may get into CUDA OOM if the model size is too big.
- Note: This setting is only supported for the 'yolov5' algorithm.
- :vartype model_size: str
- :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%.
- Note: training run may get into CUDA OOM if there is insufficient GPU memory.
- Note: This setting is only supported for the 'yolov5' algorithm.
- :vartype multi_scale: str
- :ivar nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be
- a float in the range [0, 1].
- :vartype nms_iou_threshold: str
- :ivar tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must not
- be
- None to enable small object detection logic. A string containing two integers in mxn format.
- Note: This setting is not supported for the 'yolov5' algorithm.
- :vartype tile_grid_size: str
- :ivar tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be a float
- in the range [0, 1).
- Note: This setting is not supported for the 'yolov5' algorithm.
- :vartype tile_overlap_ratio: str
- :ivar tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging
- predictions from tiles and image.
- Used in validation/inference. Must be a float in the range [0, 1].
- Note: This setting is not supported for the 'yolov5' algorithm.
- NMS: Non-maximum suppression.
- :vartype tile_predictions_nms_threshold: str
- :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be
- a float in the range [0, 1].
- :vartype validation_iou_threshold: str
- :ivar validation_metric_type: Metric computation method to use for validation metrics. Must be
- 'none', 'coco', 'voc', or 'coco_voc'.
- :vartype validation_metric_type: str - """ - - _attribute_map = { - 'ams_gradient': {'key': 'amsGradient', 'type': 'str'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'str'}, - 'beta2': {'key': 'beta2', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'str'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'str'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'str'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'str'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'str'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'str'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'str'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'str'}, - 'nesterov': {'key': 'nesterov', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'str'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'str'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'str'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'str'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'str'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'str'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'str'}, - 'box_detections_per_image': {'key': 'boxDetectionsPerImage', 'type': 'str'}, - 'box_score_threshold': {'key': 'boxScoreThreshold', 'type': 'str'}, - 'image_size': {'key': 'imageSize', 'type': 'str'}, - 'max_size': {'key': 'maxSize', 'type': 'str'}, - 'min_size': {'key': 'minSize', 'type': 'str'}, - 'model_size': {'key': 'modelSize', 'type': 'str'}, - 'multi_scale': {'key': 'multiScale', 'type': 'str'}, - 'nms_iou_threshold': {'key': 'nmsIouThreshold', 'type': 'str'}, - 'tile_grid_size': {'key': 'tileGridSize', 'type': 'str'}, - 'tile_overlap_ratio': {'key': 'tileOverlapRatio', 'type': 'str'}, - 'tile_predictions_nms_threshold': {'key': 'tilePredictionsNmsThreshold', 'type': 'str'}, - 'validation_iou_threshold': {'key': 'validationIouThreshold', 'type': 'str'}, - 'validation_metric_type': {'key': 'validationMetricType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :paramtype ams_gradient: str - :keyword augmentations: Settings for using Augmentations. - :paramtype augmentations: str - :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta1: str - :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta2: str - :keyword distributed: Whether to use distributer training. - :paramtype distributed: str - :keyword early_stopping: Enable early stopping logic during training. 
- :paramtype early_stopping: str - :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait - before primary metric improvement - is tracked for early stopping. Must be a positive integer. - :paramtype early_stopping_delay: str - :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :paramtype early_stopping_patience: str - :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. - :paramtype enable_onnx_normalization: str - :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. - Must be a positive integer. - :paramtype evaluation_frequency: str - :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :paramtype gradient_accumulation_step: str - :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive - integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype layers_to_freeze: str - :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :paramtype learning_rate: str - :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. - :paramtype learning_rate_scheduler: str - :keyword model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype model_name: str - :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, - 1]. - :paramtype momentum: str - :keyword nesterov: Enable nesterov when optimizer is 'sgd'. - :paramtype nesterov: str - :keyword number_of_epochs: Number of training epochs. Must be a positive integer. - :paramtype number_of_epochs: str - :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. - :paramtype number_of_workers: str - :keyword optimizer: Type of optimizer. Must be either 'sgd', 'adam', or 'adamw'. - :paramtype optimizer: str - :keyword random_seed: Random seed to be used when using deterministic training. - :paramtype random_seed: str - :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float - in the range [0, 1]. - :paramtype step_lr_gamma: str - :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be - a positive integer. - :paramtype step_lr_step_size: str - :keyword training_batch_size: Training batch size. Must be a positive integer. - :paramtype training_batch_size: str - :keyword validation_batch_size: Validation batch size. Must be a positive integer. - :paramtype validation_batch_size: str - :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. 
- :paramtype warmup_cosine_lr_cycles: str - :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :paramtype warmup_cosine_lr_warmup_epochs: str - :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must - be a float in the range[0, 1]. - :paramtype weight_decay: str - :keyword box_detections_per_image: Maximum number of detections per image, for all classes. - Must be a positive integer. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype box_detections_per_image: str - :keyword box_score_threshold: During inference, only return proposals with a classification - score greater than - BoxScoreThreshold. Must be a float in the range[0, 1]. - :paramtype box_score_threshold: str - :keyword image_size: Image size for train and validation. Must be a positive integer. - Note: The training run may get into CUDA OOM if the size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. - :paramtype image_size: str - :keyword max_size: Maximum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype max_size: str - :keyword min_size: Minimum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype min_size: str - :keyword model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. - Note: training run may get into CUDA OOM if the model size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. - :paramtype model_size: str - :keyword multi_scale: Enable multi-scale image by varying image size by +/- 50%. - Note: training run may get into CUDA OOM if no sufficient GPU memory. - Note: This settings is only supported for the 'yolov5' algorithm. - :paramtype multi_scale: str - :keyword nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be - float in the range [0, 1]. - :paramtype nms_iou_threshold: str - :keyword tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must - not be - None to enable small object detection logic. A string containing two integers in mxn format. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype tile_grid_size: str - :keyword tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be - float in the range [0, 1). - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype tile_overlap_ratio: str - :keyword tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging - predictions from tiles and image. - Used in validation/ inference. Must be float in the range [0, 1]. - Note: This settings is not supported for the 'yolov5' algorithm. - NMS: Non-maximum suppression. - :paramtype tile_predictions_nms_threshold: str - :keyword validation_iou_threshold: IOU threshold to use when computing validation metric. Must - be float in the range [0, 1]. - :paramtype validation_iou_threshold: str - :keyword validation_metric_type: Metric computation method to use for validation metrics. Must - be 'none', 'coco', 'voc', or 'coco_voc'. 
- :paramtype validation_metric_type: str - """ - super(ImageModelDistributionSettingsObjectDetection, self).__init__(**kwargs) - self.box_detections_per_image = kwargs.get('box_detections_per_image', None) - self.box_score_threshold = kwargs.get('box_score_threshold', None) - self.image_size = kwargs.get('image_size', None) - self.max_size = kwargs.get('max_size', None) - self.min_size = kwargs.get('min_size', None) - self.model_size = kwargs.get('model_size', None) - self.multi_scale = kwargs.get('multi_scale', None) - self.nms_iou_threshold = kwargs.get('nms_iou_threshold', None) - self.tile_grid_size = kwargs.get('tile_grid_size', None) - self.tile_overlap_ratio = kwargs.get('tile_overlap_ratio', None) - self.tile_predictions_nms_threshold = kwargs.get('tile_predictions_nms_threshold', None) - self.validation_iou_threshold = kwargs.get('validation_iou_threshold', None) - self.validation_metric_type = kwargs.get('validation_metric_type', None) - - -class ImageModelSettings(msrest.serialization.Model): - """Settings used for training the model. -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - - :ivar advanced_settings: Settings for advanced scenarios. - :vartype advanced_settings: str - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: bool - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: float - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: float - :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. - :vartype checkpoint_frequency: int - :ivar checkpoint_model: The pretrained checkpoint model for incremental training. - :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput - :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for - incremental training. - :vartype checkpoint_run_id: str - :ivar distributed: Whether to use distributed training. - :vartype distributed: bool - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: bool - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: int - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: int - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: bool - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. - :vartype evaluation_frequency: int - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. 
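The distribution-settings class that ends above is the hyperparameter-sweep counterpart of the typed settings classes that follow: every field is declared as `str` because it carries a sweep expression rather than a concrete value. A minimal construction sketch, assuming azure-ai-ml is installed and that this internal preview namespace keeps exporting the class under the same name; the choice/uniform expression strings are illustrative of the AutoML sweep grammar and are not taken from this diff.

# Sketch only: this import path is the internal, versioned REST-client namespace
# touched by this diff and is not a public contract.
from azure.ai.ml._restclient.v2023_08_01_preview.models import (
    ImageModelDistributionSettingsObjectDetection,
)

# Every keyword is optional and every value is a string holding a sweep expression.
search_space_entry = ImageModelDistributionSettingsObjectDetection(
    model_name="choice('yolov5')",            # assumed sweep-expression syntax
    learning_rate="uniform(0.0001, 0.01)",    # docstring: float in the range [0, 1]
    optimizer="choice('sgd', 'adamw')",       # allowed optimizer names per the docstring
    nms_iou_threshold="uniform(0.4, 0.6)",
)
print(search_space_entry.learning_rate)       # -> "uniform(0.0001, 0.01)"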
- :vartype gradient_accumulation_step: int - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: int - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: float - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". - :vartype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.LearningRateScheduler - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: float - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: bool - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: int - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: int - :ivar optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". - :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer - :ivar random_seed: Random seed to be used when using deterministic training. - :vartype random_seed: int - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: float - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: int - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: int - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: int - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :vartype warmup_cosine_lr_cycles: float - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: int - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. 
- :vartype weight_decay: float - """ - - _attribute_map = { - 'advanced_settings': {'key': 'advancedSettings', 'type': 'str'}, - 'ams_gradient': {'key': 'amsGradient', 'type': 'bool'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'float'}, - 'beta2': {'key': 'beta2', 'type': 'float'}, - 'checkpoint_frequency': {'key': 'checkpointFrequency', 'type': 'int'}, - 'checkpoint_model': {'key': 'checkpointModel', 'type': 'MLFlowModelJobInput'}, - 'checkpoint_run_id': {'key': 'checkpointRunId', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'bool'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'bool'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'int'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'int'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'bool'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'int'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'int'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'int'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'float'}, - 'nesterov': {'key': 'nesterov', 'type': 'bool'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'int'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'float'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'int'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'int'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'float'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'int'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword advanced_settings: Settings for advanced scenarios. - :paramtype advanced_settings: str - :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :paramtype ams_gradient: bool - :keyword augmentations: Settings for using Augmentations. - :paramtype augmentations: str - :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta1: float - :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta2: float - :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive - integer. - :paramtype checkpoint_frequency: int - :keyword checkpoint_model: The pretrained checkpoint model for incremental training. - :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput - :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for - incremental training. - :paramtype checkpoint_run_id: str - :keyword distributed: Whether to use distributed training. - :paramtype distributed: bool - :keyword early_stopping: Enable early stopping logic during training. 
- :paramtype early_stopping: bool - :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait - before primary metric improvement - is tracked for early stopping. Must be a positive integer. - :paramtype early_stopping_delay: int - :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :paramtype early_stopping_patience: int - :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. - :paramtype enable_onnx_normalization: bool - :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. - Must be a positive integer. - :paramtype evaluation_frequency: int - :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :paramtype gradient_accumulation_step: int - :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive - integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype layers_to_freeze: int - :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :paramtype learning_rate: float - :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". - :paramtype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.LearningRateScheduler - :keyword model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype model_name: str - :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, - 1]. - :paramtype momentum: float - :keyword nesterov: Enable nesterov when optimizer is 'sgd'. - :paramtype nesterov: bool - :keyword number_of_epochs: Number of training epochs. Must be a positive integer. - :paramtype number_of_epochs: int - :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. - :paramtype number_of_workers: int - :keyword optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". - :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer - :keyword random_seed: Random seed to be used when using deterministic training. - :paramtype random_seed: int - :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float - in the range [0, 1]. - :paramtype step_lr_gamma: float - :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be - a positive integer. - :paramtype step_lr_step_size: int - :keyword training_batch_size: Training batch size. Must be a positive integer. - :paramtype training_batch_size: int - :keyword validation_batch_size: Validation batch size. Must be a positive integer. 
- :paramtype validation_batch_size: int - :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :paramtype warmup_cosine_lr_cycles: float - :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :paramtype warmup_cosine_lr_warmup_epochs: int - :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must - be a float in the range[0, 1]. - :paramtype weight_decay: float - """ - super(ImageModelSettings, self).__init__(**kwargs) - self.advanced_settings = kwargs.get('advanced_settings', None) - self.ams_gradient = kwargs.get('ams_gradient', None) - self.augmentations = kwargs.get('augmentations', None) - self.beta1 = kwargs.get('beta1', None) - self.beta2 = kwargs.get('beta2', None) - self.checkpoint_frequency = kwargs.get('checkpoint_frequency', None) - self.checkpoint_model = kwargs.get('checkpoint_model', None) - self.checkpoint_run_id = kwargs.get('checkpoint_run_id', None) - self.distributed = kwargs.get('distributed', None) - self.early_stopping = kwargs.get('early_stopping', None) - self.early_stopping_delay = kwargs.get('early_stopping_delay', None) - self.early_stopping_patience = kwargs.get('early_stopping_patience', None) - self.enable_onnx_normalization = kwargs.get('enable_onnx_normalization', None) - self.evaluation_frequency = kwargs.get('evaluation_frequency', None) - self.gradient_accumulation_step = kwargs.get('gradient_accumulation_step', None) - self.layers_to_freeze = kwargs.get('layers_to_freeze', None) - self.learning_rate = kwargs.get('learning_rate', None) - self.learning_rate_scheduler = kwargs.get('learning_rate_scheduler', None) - self.model_name = kwargs.get('model_name', None) - self.momentum = kwargs.get('momentum', None) - self.nesterov = kwargs.get('nesterov', None) - self.number_of_epochs = kwargs.get('number_of_epochs', None) - self.number_of_workers = kwargs.get('number_of_workers', None) - self.optimizer = kwargs.get('optimizer', None) - self.random_seed = kwargs.get('random_seed', None) - self.step_lr_gamma = kwargs.get('step_lr_gamma', None) - self.step_lr_step_size = kwargs.get('step_lr_step_size', None) - self.training_batch_size = kwargs.get('training_batch_size', None) - self.validation_batch_size = kwargs.get('validation_batch_size', None) - self.warmup_cosine_lr_cycles = kwargs.get('warmup_cosine_lr_cycles', None) - self.warmup_cosine_lr_warmup_epochs = kwargs.get('warmup_cosine_lr_warmup_epochs', None) - self.weight_decay = kwargs.get('weight_decay', None) - - -class ImageModelSettingsClassification(ImageModelSettings): - """Settings used for training the model. -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - - :ivar advanced_settings: Settings for advanced scenarios. - :vartype advanced_settings: str - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: bool - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: float - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. 
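The constructors above all follow the same generated pattern: `__init__(**kwargs)` copies each optional keyword onto the instance with `kwargs.get(..., None)`, while `_attribute_map` tells the serializer which camelCase REST key and wire type each Python attribute corresponds to. The following standalone sketch is a hypothetical toy class, not the msrest serializer itself; it only illustrates how that mapping turns snake_case attributes into the camelCase payload keys defined in workspaceRP.json.

from typing import Any, Dict, Optional


class TinyImageModelSettings:
    """Hypothetical toy model illustrating the generated kwargs/_attribute_map pattern."""

    _attribute_map = {
        "learning_rate": {"key": "learningRate", "type": "float"},
        "number_of_epochs": {"key": "numberOfEpochs", "type": "int"},
        "early_stopping": {"key": "earlyStopping", "type": "bool"},
    }

    def __init__(self, **kwargs: Any) -> None:
        # Same pattern as the generated models above: every keyword is optional.
        self.learning_rate: Optional[float] = kwargs.get("learning_rate", None)
        self.number_of_epochs: Optional[int] = kwargs.get("number_of_epochs", None)
        self.early_stopping: Optional[bool] = kwargs.get("early_stopping", None)

    def serialize(self) -> Dict[str, Any]:
        # Emit only attributes that were set, under their camelCase REST keys.
        body: Dict[str, Any] = {}
        for attr, meta in self._attribute_map.items():
            value = getattr(self, attr)
            if value is not None:
                body[meta["key"]] = value
        return body


print(TinyImageModelSettings(learning_rate=0.01, early_stopping=True).serialize())
# -> {'learningRate': 0.01, 'earlyStopping': True}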
- :vartype beta2: float - :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. - :vartype checkpoint_frequency: int - :ivar checkpoint_model: The pretrained checkpoint model for incremental training. - :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput - :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for - incremental training. - :vartype checkpoint_run_id: str - :ivar distributed: Whether to use distributed training. - :vartype distributed: bool - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: bool - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. - :vartype early_stopping_delay: int - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: int - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: bool - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. - :vartype evaluation_frequency: int - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: int - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: int - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: float - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". - :vartype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.LearningRateScheduler - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: float - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: bool - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: int - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: int - :ivar optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". - :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer - :ivar random_seed: Random seed to be used when using deterministic training. 
- :vartype random_seed: int - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: float - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: int - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: int - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: int - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :vartype warmup_cosine_lr_cycles: float - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :vartype warmup_cosine_lr_warmup_epochs: int - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. - :vartype weight_decay: float - :ivar training_crop_size: Image crop size that is input to the neural network for the training - dataset. Must be a positive integer. - :vartype training_crop_size: int - :ivar validation_crop_size: Image crop size that is input to the neural network for the - validation dataset. Must be a positive integer. - :vartype validation_crop_size: int - :ivar validation_resize_size: Image size to which to resize before cropping for validation - dataset. Must be a positive integer. - :vartype validation_resize_size: int - :ivar weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. - 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be - 0 or 1 or 2. 
- :vartype weighted_loss: int - """ - - _attribute_map = { - 'advanced_settings': {'key': 'advancedSettings', 'type': 'str'}, - 'ams_gradient': {'key': 'amsGradient', 'type': 'bool'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'float'}, - 'beta2': {'key': 'beta2', 'type': 'float'}, - 'checkpoint_frequency': {'key': 'checkpointFrequency', 'type': 'int'}, - 'checkpoint_model': {'key': 'checkpointModel', 'type': 'MLFlowModelJobInput'}, - 'checkpoint_run_id': {'key': 'checkpointRunId', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'bool'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'bool'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'int'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'int'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'bool'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'int'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'int'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'int'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'float'}, - 'nesterov': {'key': 'nesterov', 'type': 'bool'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'int'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'float'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'int'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'int'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'float'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'int'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, - 'training_crop_size': {'key': 'trainingCropSize', 'type': 'int'}, - 'validation_crop_size': {'key': 'validationCropSize', 'type': 'int'}, - 'validation_resize_size': {'key': 'validationResizeSize', 'type': 'int'}, - 'weighted_loss': {'key': 'weightedLoss', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword advanced_settings: Settings for advanced scenarios. - :paramtype advanced_settings: str - :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :paramtype ams_gradient: bool - :keyword augmentations: Settings for using Augmentations. - :paramtype augmentations: str - :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta1: float - :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta2: float - :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive - integer. - :paramtype checkpoint_frequency: int - :keyword checkpoint_model: The pretrained checkpoint model for incremental training. - :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput - :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for - incremental training. 
- :paramtype checkpoint_run_id: str - :keyword distributed: Whether to use distributed training. - :paramtype distributed: bool - :keyword early_stopping: Enable early stopping logic during training. - :paramtype early_stopping: bool - :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait - before primary metric improvement - is tracked for early stopping. Must be a positive integer. - :paramtype early_stopping_delay: int - :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :paramtype early_stopping_patience: int - :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. - :paramtype enable_onnx_normalization: bool - :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. - Must be a positive integer. - :paramtype evaluation_frequency: int - :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :paramtype gradient_accumulation_step: int - :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive - integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype layers_to_freeze: int - :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :paramtype learning_rate: float - :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". - :paramtype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.LearningRateScheduler - :keyword model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype model_name: str - :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, - 1]. - :paramtype momentum: float - :keyword nesterov: Enable nesterov when optimizer is 'sgd'. - :paramtype nesterov: bool - :keyword number_of_epochs: Number of training epochs. Must be a positive integer. - :paramtype number_of_epochs: int - :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. - :paramtype number_of_workers: int - :keyword optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". - :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer - :keyword random_seed: Random seed to be used when using deterministic training. - :paramtype random_seed: int - :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float - in the range [0, 1]. - :paramtype step_lr_gamma: float - :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be - a positive integer. - :paramtype step_lr_step_size: int - :keyword training_batch_size: Training batch size. 
Must be a positive integer. - :paramtype training_batch_size: int - :keyword validation_batch_size: Validation batch size. Must be a positive integer. - :paramtype validation_batch_size: int - :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :paramtype warmup_cosine_lr_cycles: float - :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :paramtype warmup_cosine_lr_warmup_epochs: int - :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must - be a float in the range[0, 1]. - :paramtype weight_decay: float - :keyword training_crop_size: Image crop size that is input to the neural network for the - training dataset. Must be a positive integer. - :paramtype training_crop_size: int - :keyword validation_crop_size: Image crop size that is input to the neural network for the - validation dataset. Must be a positive integer. - :paramtype validation_crop_size: int - :keyword validation_resize_size: Image size to which to resize before cropping for validation - dataset. Must be a positive integer. - :paramtype validation_resize_size: int - :keyword weighted_loss: Weighted loss. The accepted values are 0 for no weighted loss. - 1 for weighted loss with sqrt.(class_weights). 2 for weighted loss with class_weights. Must be - 0 or 1 or 2. - :paramtype weighted_loss: int - """ - super(ImageModelSettingsClassification, self).__init__(**kwargs) - self.training_crop_size = kwargs.get('training_crop_size', None) - self.validation_crop_size = kwargs.get('validation_crop_size', None) - self.validation_resize_size = kwargs.get('validation_resize_size', None) - self.weighted_loss = kwargs.get('weighted_loss', None) - - -class ImageModelSettingsObjectDetection(ImageModelSettings): - """Settings used for training the model. -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - - :ivar advanced_settings: Settings for advanced scenarios. - :vartype advanced_settings: str - :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :vartype ams_gradient: bool - :ivar augmentations: Settings for using Augmentations. - :vartype augmentations: str - :ivar beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta1: float - :ivar beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the range - [0, 1]. - :vartype beta2: float - :ivar checkpoint_frequency: Frequency to store model checkpoints. Must be a positive integer. - :vartype checkpoint_frequency: int - :ivar checkpoint_model: The pretrained checkpoint model for incremental training. - :vartype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput - :ivar checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for - incremental training. - :vartype checkpoint_run_id: str - :ivar distributed: Whether to use distributed training. - :vartype distributed: bool - :ivar early_stopping: Enable early stopping logic during training. - :vartype early_stopping: bool - :ivar early_stopping_delay: Minimum number of epochs or validation evaluations to wait before - primary metric improvement - is tracked for early stopping. Must be a positive integer. 
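ImageModelSettingsClassification, whose body completes just above, only layers four classification-specific knobs (training_crop_size, validation_crop_size, validation_resize_size, weighted_loss) on top of the shared ImageModelSettings fields. A hedged construction sketch, again assuming the internal preview models namespace is importable; the numeric values are illustrative choices, not documented defaults.

from azure.ai.ml._restclient.v2023_08_01_preview.models import (
    ImageModelSettingsClassification,
)

classification_settings = ImageModelSettingsClassification(
    model_name="seresnext",       # model referenced in the layers_to_freeze docstring
    layers_to_freeze=2,           # for 'seresnext' this freezes layer0 and layer1
    training_crop_size=224,       # positive integer per the docstring
    validation_resize_size=256,
    weighted_loss=1,              # accepted values are 0, 1, or 2
)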
- :vartype early_stopping_delay: int - :ivar early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :vartype early_stopping_patience: int - :ivar enable_onnx_normalization: Enable normalization when exporting ONNX model. - :vartype enable_onnx_normalization: bool - :ivar evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. Must - be a positive integer. - :vartype evaluation_frequency: int - :ivar gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :vartype gradient_accumulation_step: int - :ivar layers_to_freeze: Number of layers to freeze for the model. Must be a positive integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype layers_to_freeze: int - :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :vartype learning_rate: float - :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". - :vartype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.LearningRateScheduler - :ivar model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :vartype model_name: str - :ivar momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, 1]. - :vartype momentum: float - :ivar nesterov: Enable nesterov when optimizer is 'sgd'. - :vartype nesterov: bool - :ivar number_of_epochs: Number of training epochs. Must be a positive integer. - :vartype number_of_epochs: int - :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. - :vartype number_of_workers: int - :ivar optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". - :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer - :ivar random_seed: Random seed to be used when using deterministic training. - :vartype random_seed: int - :ivar step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float in - the range [0, 1]. - :vartype step_lr_gamma: float - :ivar step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be a - positive integer. - :vartype step_lr_step_size: int - :ivar training_batch_size: Training batch size. Must be a positive integer. - :vartype training_batch_size: int - :ivar validation_batch_size: Validation batch size. Must be a positive integer. - :vartype validation_batch_size: int - :ivar warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :vartype warmup_cosine_lr_cycles: float - :ivar warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. 
- :vartype warmup_cosine_lr_warmup_epochs: int - :ivar weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must be - a float in the range[0, 1]. - :vartype weight_decay: float - :ivar box_detections_per_image: Maximum number of detections per image, for all classes. Must - be a positive integer. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype box_detections_per_image: int - :ivar box_score_threshold: During inference, only return proposals with a classification score - greater than - BoxScoreThreshold. Must be a float in the range[0, 1]. - :vartype box_score_threshold: float - :ivar image_size: Image size for train and validation. Must be a positive integer. - Note: The training run may get into CUDA OOM if the size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. - :vartype image_size: int - :ivar log_training_metrics: Enable computing and logging training metrics. Possible values - include: "Enable", "Disable". - :vartype log_training_metrics: str or - ~azure.mgmt.machinelearningservices.models.LogTrainingMetrics - :ivar log_validation_loss: Enable computing and logging validation loss. Possible values - include: "Enable", "Disable". - :vartype log_validation_loss: str or - ~azure.mgmt.machinelearningservices.models.LogValidationLoss - :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype max_size: int - :ivar min_size: Minimum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype min_size: int - :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. - Note: training run may get into CUDA OOM if the model size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. Possible values include: - "None", "Small", "Medium", "Large", "ExtraLarge". - :vartype model_size: str or ~azure.mgmt.machinelearningservices.models.ModelSize - :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%. - Note: training run may get into CUDA OOM if no sufficient GPU memory. - Note: This settings is only supported for the 'yolov5' algorithm. - :vartype multi_scale: bool - :ivar nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be a - float in the range [0, 1]. - :vartype nms_iou_threshold: float - :ivar tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must not - be - None to enable small object detection logic. A string containing two integers in mxn format. - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype tile_grid_size: str - :ivar tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be float - in the range [0, 1). - Note: This settings is not supported for the 'yolov5' algorithm. - :vartype tile_overlap_ratio: float - :ivar tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging - predictions from tiles and image. - Used in validation/ inference. Must be float in the range [0, 1]. - Note: This settings is not supported for the 'yolov5' algorithm. 
- :vartype tile_predictions_nms_threshold: float - :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be - float in the range [0, 1]. - :vartype validation_iou_threshold: float - :ivar validation_metric_type: Metric computation method to use for validation metrics. Possible - values include: "None", "Coco", "Voc", "CocoVoc". - :vartype validation_metric_type: str or - ~azure.mgmt.machinelearningservices.models.ValidationMetricType - """ - - _attribute_map = { - 'advanced_settings': {'key': 'advancedSettings', 'type': 'str'}, - 'ams_gradient': {'key': 'amsGradient', 'type': 'bool'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'float'}, - 'beta2': {'key': 'beta2', 'type': 'float'}, - 'checkpoint_frequency': {'key': 'checkpointFrequency', 'type': 'int'}, - 'checkpoint_model': {'key': 'checkpointModel', 'type': 'MLFlowModelJobInput'}, - 'checkpoint_run_id': {'key': 'checkpointRunId', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'bool'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'bool'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'int'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'int'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'bool'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'int'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'int'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'int'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'float'}, - 'nesterov': {'key': 'nesterov', 'type': 'bool'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'int'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'float'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'int'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'int'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'float'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'int'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, - 'box_detections_per_image': {'key': 'boxDetectionsPerImage', 'type': 'int'}, - 'box_score_threshold': {'key': 'boxScoreThreshold', 'type': 'float'}, - 'image_size': {'key': 'imageSize', 'type': 'int'}, - 'log_training_metrics': {'key': 'logTrainingMetrics', 'type': 'str'}, - 'log_validation_loss': {'key': 'logValidationLoss', 'type': 'str'}, - 'max_size': {'key': 'maxSize', 'type': 'int'}, - 'min_size': {'key': 'minSize', 'type': 'int'}, - 'model_size': {'key': 'modelSize', 'type': 'str'}, - 'multi_scale': {'key': 'multiScale', 'type': 'bool'}, - 'nms_iou_threshold': {'key': 'nmsIouThreshold', 'type': 'float'}, - 'tile_grid_size': {'key': 'tileGridSize', 'type': 'str'}, - 'tile_overlap_ratio': {'key': 'tileOverlapRatio', 'type': 'float'}, - 'tile_predictions_nms_threshold': {'key': 'tilePredictionsNmsThreshold', 'type': 'float'}, - 'validation_iou_threshold': {'key': 'validationIouThreshold', 'type': 'float'}, - 
'validation_metric_type': {'key': 'validationMetricType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword advanced_settings: Settings for advanced scenarios. - :paramtype advanced_settings: str - :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. - :paramtype ams_gradient: bool - :keyword augmentations: Settings for using Augmentations. - :paramtype augmentations: str - :keyword beta1: Value of 'beta1' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta1: float - :keyword beta2: Value of 'beta2' when optimizer is 'adam' or 'adamw'. Must be a float in the - range [0, 1]. - :paramtype beta2: float - :keyword checkpoint_frequency: Frequency to store model checkpoints. Must be a positive - integer. - :paramtype checkpoint_frequency: int - :keyword checkpoint_model: The pretrained checkpoint model for incremental training. - :paramtype checkpoint_model: ~azure.mgmt.machinelearningservices.models.MLFlowModelJobInput - :keyword checkpoint_run_id: The id of a previous run that has a pretrained checkpoint for - incremental training. - :paramtype checkpoint_run_id: str - :keyword distributed: Whether to use distributed training. - :paramtype distributed: bool - :keyword early_stopping: Enable early stopping logic during training. - :paramtype early_stopping: bool - :keyword early_stopping_delay: Minimum number of epochs or validation evaluations to wait - before primary metric improvement - is tracked for early stopping. Must be a positive integer. - :paramtype early_stopping_delay: int - :keyword early_stopping_patience: Minimum number of epochs or validation evaluations with no - primary metric improvement before - the run is stopped. Must be a positive integer. - :paramtype early_stopping_patience: int - :keyword enable_onnx_normalization: Enable normalization when exporting ONNX model. - :paramtype enable_onnx_normalization: bool - :keyword evaluation_frequency: Frequency to evaluate validation dataset to get metric scores. - Must be a positive integer. - :paramtype evaluation_frequency: int - :keyword gradient_accumulation_step: Gradient accumulation means running a configured number of - "GradAccumulationStep" steps without - updating the model weights while accumulating the gradients of those steps, and then using - the accumulated gradients to compute the weight updates. Must be a positive integer. - :paramtype gradient_accumulation_step: int - :keyword layers_to_freeze: Number of layers to freeze for the model. Must be a positive - integer. - For instance, passing 2 as value for 'seresnext' means - freezing layer0 and layer1. For a full list of models supported and details on layer freeze, - please - see: https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. - :paramtype layers_to_freeze: int - :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. - :paramtype learning_rate: float - :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". - :paramtype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.LearningRateScheduler - :keyword model_name: Name of the model to use for training. - For more information on the available models please visit the official documentation: - https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. 
- :paramtype model_name: str - :keyword momentum: Value of momentum when optimizer is 'sgd'. Must be a float in the range [0, - 1]. - :paramtype momentum: float - :keyword nesterov: Enable nesterov when optimizer is 'sgd'. - :paramtype nesterov: bool - :keyword number_of_epochs: Number of training epochs. Must be a positive integer. - :paramtype number_of_epochs: int - :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. - :paramtype number_of_workers: int - :keyword optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". - :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer - :keyword random_seed: Random seed to be used when using deterministic training. - :paramtype random_seed: int - :keyword step_lr_gamma: Value of gamma when learning rate scheduler is 'step'. Must be a float - in the range [0, 1]. - :paramtype step_lr_gamma: float - :keyword step_lr_step_size: Value of step size when learning rate scheduler is 'step'. Must be - a positive integer. - :paramtype step_lr_step_size: int - :keyword training_batch_size: Training batch size. Must be a positive integer. - :paramtype training_batch_size: int - :keyword validation_batch_size: Validation batch size. Must be a positive integer. - :paramtype validation_batch_size: int - :keyword warmup_cosine_lr_cycles: Value of cosine cycle when learning rate scheduler is - 'warmup_cosine'. Must be a float in the range [0, 1]. - :paramtype warmup_cosine_lr_cycles: float - :keyword warmup_cosine_lr_warmup_epochs: Value of warmup epochs when learning rate scheduler is - 'warmup_cosine'. Must be a positive integer. - :paramtype warmup_cosine_lr_warmup_epochs: int - :keyword weight_decay: Value of weight decay when optimizer is 'sgd', 'adam', or 'adamw'. Must - be a float in the range[0, 1]. - :paramtype weight_decay: float - :keyword box_detections_per_image: Maximum number of detections per image, for all classes. - Must be a positive integer. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype box_detections_per_image: int - :keyword box_score_threshold: During inference, only return proposals with a classification - score greater than - BoxScoreThreshold. Must be a float in the range[0, 1]. - :paramtype box_score_threshold: float - :keyword image_size: Image size for train and validation. Must be a positive integer. - Note: The training run may get into CUDA OOM if the size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. - :paramtype image_size: int - :keyword log_training_metrics: Enable computing and logging training metrics. Possible values - include: "Enable", "Disable". - :paramtype log_training_metrics: str or - ~azure.mgmt.machinelearningservices.models.LogTrainingMetrics - :keyword log_validation_loss: Enable computing and logging validation loss. Possible values - include: "Enable", "Disable". - :paramtype log_validation_loss: str or - ~azure.mgmt.machinelearningservices.models.LogValidationLoss - :keyword max_size: Maximum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype max_size: int - :keyword min_size: Minimum size of the image to be rescaled before feeding it to the backbone. - Must be a positive integer. Note: training run may get into CUDA OOM if the size is too big. 
- Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype min_size: int - :keyword model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. - Note: training run may get into CUDA OOM if the model size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. Possible values include: - "None", "Small", "Medium", "Large", "ExtraLarge". - :paramtype model_size: str or ~azure.mgmt.machinelearningservices.models.ModelSize - :keyword multi_scale: Enable multi-scale image by varying image size by +/- 50%. - Note: training run may get into CUDA OOM if no sufficient GPU memory. - Note: This settings is only supported for the 'yolov5' algorithm. - :paramtype multi_scale: bool - :keyword nms_iou_threshold: IOU threshold used during inference in NMS post processing. Must be - a float in the range [0, 1]. - :paramtype nms_iou_threshold: float - :keyword tile_grid_size: The grid size to use for tiling each image. Note: TileGridSize must - not be - None to enable small object detection logic. A string containing two integers in mxn format. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype tile_grid_size: str - :keyword tile_overlap_ratio: Overlap ratio between adjacent tiles in each dimension. Must be - float in the range [0, 1). - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype tile_overlap_ratio: float - :keyword tile_predictions_nms_threshold: The IOU threshold to use to perform NMS while merging - predictions from tiles and image. - Used in validation/ inference. Must be float in the range [0, 1]. - Note: This settings is not supported for the 'yolov5' algorithm. - :paramtype tile_predictions_nms_threshold: float - :keyword validation_iou_threshold: IOU threshold to use when computing validation metric. Must - be float in the range [0, 1]. - :paramtype validation_iou_threshold: float - :keyword validation_metric_type: Metric computation method to use for validation metrics. - Possible values include: "None", "Coco", "Voc", "CocoVoc". - :paramtype validation_metric_type: str or - ~azure.mgmt.machinelearningservices.models.ValidationMetricType - """ - super(ImageModelSettingsObjectDetection, self).__init__(**kwargs) - self.box_detections_per_image = kwargs.get('box_detections_per_image', None) - self.box_score_threshold = kwargs.get('box_score_threshold', None) - self.image_size = kwargs.get('image_size', None) - self.log_training_metrics = kwargs.get('log_training_metrics', None) - self.log_validation_loss = kwargs.get('log_validation_loss', None) - self.max_size = kwargs.get('max_size', None) - self.min_size = kwargs.get('min_size', None) - self.model_size = kwargs.get('model_size', None) - self.multi_scale = kwargs.get('multi_scale', None) - self.nms_iou_threshold = kwargs.get('nms_iou_threshold', None) - self.tile_grid_size = kwargs.get('tile_grid_size', None) - self.tile_overlap_ratio = kwargs.get('tile_overlap_ratio', None) - self.tile_predictions_nms_threshold = kwargs.get('tile_predictions_nms_threshold', None) - self.validation_iou_threshold = kwargs.get('validation_iou_threshold', None) - self.validation_metric_type = kwargs.get('validation_metric_type', None) - - -class ImageObjectDetection(AutoMLVertical, ImageObjectDetectionBase): - """Image Object Detection. Object detection is used to identify objects in an image and locate each object with a -bounding box e.g. locate all dogs and cats in an image and draw a bounding box around each. 
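ImageModelSettingsObjectDetection, which ends above, adds the detection-specific fields on the same base: box thresholds, image sizing, tiling for small-object detection, NMS thresholds, and the new log_training_metrics/log_validation_loss enums. A hedged sketch of the tiling-related settings follows (internal preview namespace assumed importable); because the docstrings above state the tiling fields are not supported for the 'yolov5' algorithm, no model_name is set here.

from azure.ai.ml._restclient.v2023_08_01_preview.models import (
    ImageModelSettingsObjectDetection,
)

detection_settings = ImageModelSettingsObjectDetection(
    box_score_threshold=0.3,              # float in the range [0, 1]
    nms_iou_threshold=0.5,                # float in the range [0, 1]
    tile_grid_size="3x2",                 # "mxn" string; enables small-object tiling
    tile_overlap_ratio=0.25,              # float in the range [0, 1)
    tile_predictions_nms_threshold=0.25,  # NMS threshold when merging tile predictions
    log_training_metrics="Enable",        # enum value listed in the docstring
)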
- - All required parameters must be populated in order to send to Azure. - - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar model_settings: Settings used for training the model. - :vartype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric to optimize for this task. Possible values include: - "MeanAveragePrecision". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics - """ - - _validation = { - 'limit_settings': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, - } - - _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsObjectDetection'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsObjectDetection]'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. 
- :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings - :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword model_settings: Settings used for training the model. - :paramtype model_settings: - ~azure.mgmt.machinelearningservices.models.ImageModelSettingsObjectDetection - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword primary_metric: Primary metric to optimize for this task. Possible values include: - "MeanAveragePrecision". - :paramtype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics - """ - super(ImageObjectDetection, self).__init__(**kwargs) - self.limit_settings = kwargs['limit_settings'] - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.model_settings = kwargs.get('model_settings', None) - self.search_space = kwargs.get('search_space', None) - self.task_type = 'ImageObjectDetection' # type: str - self.primary_metric = kwargs.get('primary_metric', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] - - -class ImageSweepSettings(msrest.serialization.Model): - """Model sweeping and hyperparameter sweeping related settings. - - All required parameters must be populated in order to send to Azure. - - :ivar early_termination: Type of early termination policy. - :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :ivar sampling_algorithm: Required. [Required] Type of the hyperparameter sampling algorithms. - Possible values include: "Grid", "Random", "Bayesian". - :vartype sampling_algorithm: str or - ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType - """ - - _validation = { - 'sampling_algorithm': {'required': True}, - } - - _attribute_map = { - 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'}, - 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword early_termination: Type of early termination policy. 
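The removed ImageObjectDetection task model pulls its two required inputs straight out of **kwargs (kwargs['limit_settings'], kwargs['training_data']), so omitting either raised a bare KeyError rather than a TypeError. A hedged sketch; ImageLimitSettings and MLTableJobInput are referenced but not defined in this hunk, so their field names are assumptions:

from azure.ai.ml._restclient.v2023_08_01_preview import models as ml_models  # assumed import path

task = ml_models.ImageObjectDetection(
    limit_settings=ml_models.ImageLimitSettings(max_trials=4),  # required; field name assumed
    training_data=ml_models.MLTableJobInput(                    # required
        uri="azureml://datastores/images/paths/train"           # example URI, not from this patch
    ),
    primary_metric="MeanAveragePrecision",
)
assert task.task_type == "ImageObjectDetection"  # discriminator pinned by the constructor

try:
    ml_models.ImageObjectDetection(training_data=task.training_data)  # limit_settings omitted
except (KeyError, TypeError):
    # The removed kwargs-based __init__ raised KeyError (kwargs['limit_settings']);
    # the regenerated keyword-only models in _models_py3.py should raise TypeError instead.
    pass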
- :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :keyword sampling_algorithm: Required. [Required] Type of the hyperparameter sampling - algorithms. Possible values include: "Grid", "Random", "Bayesian". - :paramtype sampling_algorithm: str or - ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType - """ - super(ImageSweepSettings, self).__init__(**kwargs) - self.early_termination = kwargs.get('early_termination', None) - self.sampling_algorithm = kwargs['sampling_algorithm'] - - -class ImportDataAction(ScheduleActionBase): - """ImportDataAction. - - All required parameters must be populated in order to send to Azure. - - :ivar action_type: Required. [Required] Specifies the action type of the schedule.Constant - filled by server. Possible values include: "CreateJob", "InvokeBatchEndpoint", "ImportData", - "CreateMonitor". - :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType - :ivar data_import_definition: Required. [Required] Defines Schedule action definition details. - :vartype data_import_definition: ~azure.mgmt.machinelearningservices.models.DataImport - """ - - _validation = { - 'action_type': {'required': True}, - 'data_import_definition': {'required': True}, - } - - _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, - 'data_import_definition': {'key': 'dataImportDefinition', 'type': 'DataImport'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword data_import_definition: Required. [Required] Defines Schedule action definition - details. - :paramtype data_import_definition: ~azure.mgmt.machinelearningservices.models.DataImport - """ - super(ImportDataAction, self).__init__(**kwargs) - self.action_type = 'ImportData' # type: str - self.data_import_definition = kwargs['data_import_definition'] - - -class IndexColumn(msrest.serialization.Model): - """Dto object representing index column. - - :ivar column_name: Specifies the column name. - :vartype column_name: str - :ivar data_type: Specifies the data type. Possible values include: "String", "Integer", "Long", - "Float", "Double", "Binary", "Datetime", "Boolean". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.FeatureDataType - """ - - _attribute_map = { - 'column_name': {'key': 'columnName', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword column_name: Specifies the column name. - :paramtype column_name: str - :keyword data_type: Specifies the data type. Possible values include: "String", "Integer", - "Long", "Float", "Double", "Binary", "Datetime", "Boolean". - :paramtype data_type: str or ~azure.mgmt.machinelearningservices.models.FeatureDataType - """ - super(IndexColumn, self).__init__(**kwargs) - self.column_name = kwargs.get('column_name', None) - self.data_type = kwargs.get('data_type', None) - - -class InferenceContainerProperties(msrest.serialization.Model): - """InferenceContainerProperties. - - :ivar liveness_route: The route to check the liveness of the inference server container. - :vartype liveness_route: ~azure.mgmt.machinelearningservices.models.Route - :ivar readiness_route: The route to check the readiness of the inference server container. - :vartype readiness_route: ~azure.mgmt.machinelearningservices.models.Route - :ivar scoring_route: The port to send the scoring requests to, within the inference server - container. 
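ImageSweepSettings and IndexColumn, both deleted just above, are small leaf models: the former has a single required field (sampling_algorithm), the latter none. A short sketch, with the serialized output shown as expected msrest behavior rather than guaranteed:

from azure.ai.ml._restclient.v2023_08_01_preview import models as ml_models  # assumed import path

sweep = ml_models.ImageSweepSettings(
    sampling_algorithm="Random",  # required; "Grid", "Random" or "Bayesian"
    # early_termination is optional and defaults to None in the removed code
)

index_col = ml_models.IndexColumn(column_name="timestamp", data_type="Datetime")
# serialize() maps attribute names back to the wire keys listed in _attribute_map
print(index_col.serialize())  # expected: {'columnName': 'timestamp', 'dataType': 'Datetime'}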
- :vartype scoring_route: ~azure.mgmt.machinelearningservices.models.Route - """ - - _attribute_map = { - 'liveness_route': {'key': 'livenessRoute', 'type': 'Route'}, - 'readiness_route': {'key': 'readinessRoute', 'type': 'Route'}, - 'scoring_route': {'key': 'scoringRoute', 'type': 'Route'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword liveness_route: The route to check the liveness of the inference server container. - :paramtype liveness_route: ~azure.mgmt.machinelearningservices.models.Route - :keyword readiness_route: The route to check the readiness of the inference server container. - :paramtype readiness_route: ~azure.mgmt.machinelearningservices.models.Route - :keyword scoring_route: The port to send the scoring requests to, within the inference server - container. - :paramtype scoring_route: ~azure.mgmt.machinelearningservices.models.Route - """ - super(InferenceContainerProperties, self).__init__(**kwargs) - self.liveness_route = kwargs.get('liveness_route', None) - self.readiness_route = kwargs.get('readiness_route', None) - self.scoring_route = kwargs.get('scoring_route', None) - - -class InstanceTypeSchema(msrest.serialization.Model): - """Instance type schema. - - :ivar node_selector: Node Selector. - :vartype node_selector: dict[str, str] - :ivar resources: Resource requests/limits for this instance type. - :vartype resources: ~azure.mgmt.machinelearningservices.models.InstanceTypeSchemaResources - """ - - _attribute_map = { - 'node_selector': {'key': 'nodeSelector', 'type': '{str}'}, - 'resources': {'key': 'resources', 'type': 'InstanceTypeSchemaResources'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword node_selector: Node Selector. - :paramtype node_selector: dict[str, str] - :keyword resources: Resource requests/limits for this instance type. - :paramtype resources: ~azure.mgmt.machinelearningservices.models.InstanceTypeSchemaResources - """ - super(InstanceTypeSchema, self).__init__(**kwargs) - self.node_selector = kwargs.get('node_selector', None) - self.resources = kwargs.get('resources', None) - - -class InstanceTypeSchemaResources(msrest.serialization.Model): - """Resource requests/limits for this instance type. - - :ivar requests: Resource requests for this instance type. - :vartype requests: dict[str, str] - :ivar limits: Resource limits for this instance type. - :vartype limits: dict[str, str] - """ - - _attribute_map = { - 'requests': {'key': 'requests', 'type': '{str}'}, - 'limits': {'key': 'limits', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword requests: Resource requests for this instance type. - :paramtype requests: dict[str, str] - :keyword limits: Resource limits for this instance type. - :paramtype limits: dict[str, str] - """ - super(InstanceTypeSchemaResources, self).__init__(**kwargs) - self.requests = kwargs.get('requests', None) - self.limits = kwargs.get('limits', None) - - -class IntellectualProperty(msrest.serialization.Model): - """Intellectual Property details for a resource. - - All required parameters must be populated in order to send to Azure. - - :ivar protection_level: Protection level of the Intellectual Property. Possible values include: - "All", "None". - :vartype protection_level: str or ~azure.mgmt.machinelearningservices.models.ProtectionLevel - :ivar publisher: Required. [Required] Publisher of the Intellectual Property. Must be the same - as Registry publisher name. 
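InstanceTypeSchema and InstanceTypeSchemaResources (removed above) mirror the Kubernetes resource request/limit shape as plain string dictionaries. A sketch with illustrative pool and resource values:

from azure.ai.ml._restclient.v2023_08_01_preview import models as ml_models  # assumed import path

gpu_instance_type = ml_models.InstanceTypeSchema(
    node_selector={"agentpool": "gpu"},  # example selector, not from this patch
    resources=ml_models.InstanceTypeSchemaResources(
        requests={"cpu": "2", "memory": "8Gi", "nvidia.com/gpu": "1"},
        limits={"cpu": "4", "memory": "16Gi", "nvidia.com/gpu": "1"},
    ),
)
# Both fields are optional; the removed __init__ defaulted them to None.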
- :vartype publisher: str - """ - - _validation = { - 'publisher': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'protection_level': {'key': 'protectionLevel', 'type': 'str'}, - 'publisher': {'key': 'publisher', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword protection_level: Protection level of the Intellectual Property. Possible values - include: "All", "None". - :paramtype protection_level: str or ~azure.mgmt.machinelearningservices.models.ProtectionLevel - :keyword publisher: Required. [Required] Publisher of the Intellectual Property. Must be the - same as Registry publisher name. - :paramtype publisher: str - """ - super(IntellectualProperty, self).__init__(**kwargs) - self.protection_level = kwargs.get('protection_level', None) - self.publisher = kwargs['publisher'] - - -class JobBase(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.JobBaseProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'JobBaseProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.JobBaseProperties - """ - super(JobBase, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class JobBaseResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of JobBase entities. - - :ivar next_link: The link to the next page of JobBase objects. If null, there are no additional - pages. - :vartype next_link: str - :ivar value: An array of objects of type JobBase. - :vartype value: list[~azure.mgmt.machinelearningservices.models.JobBase] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[JobBase]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of JobBase objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type JobBase. 
- :paramtype value: list[~azure.mgmt.machinelearningservices.models.JobBase] - """ - super(JobBaseResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class JobResourceConfiguration(ResourceConfiguration): - """JobResourceConfiguration. - - :ivar instance_count: Optional number of instances or nodes used by the compute target. - :vartype instance_count: int - :ivar instance_type: Optional type of VM used as supported by the compute target. - :vartype instance_type: str - :ivar locations: Locations where the job can run. - :vartype locations: list[str] - :ivar max_instance_count: Optional max allowed number of instances or nodes to be used by the - compute target. - For use with elastic training, currently supported by PyTorch distribution type only. - :vartype max_instance_count: int - :ivar properties: Additional properties bag. - :vartype properties: dict[str, any] - :ivar docker_args: Extra arguments to pass to the Docker run command. This would override any - parameters that have already been set by the system, or in this section. This parameter is only - supported for Azure ML compute types. - :vartype docker_args: str - :ivar shm_size: Size of the docker container's shared memory block. This should be in the - format of (number)(unit) where number as to be greater than 0 and the unit can be one of - b(bytes), k(kilobytes), m(megabytes), or g(gigabytes). - :vartype shm_size: str - """ - - _validation = { - 'shm_size': {'pattern': r'\d+[bBkKmMgG]'}, - } - - _attribute_map = { - 'instance_count': {'key': 'instanceCount', 'type': 'int'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'locations': {'key': 'locations', 'type': '[str]'}, - 'max_instance_count': {'key': 'maxInstanceCount', 'type': 'int'}, - 'properties': {'key': 'properties', 'type': '{object}'}, - 'docker_args': {'key': 'dockerArgs', 'type': 'str'}, - 'shm_size': {'key': 'shmSize', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword instance_count: Optional number of instances or nodes used by the compute target. - :paramtype instance_count: int - :keyword instance_type: Optional type of VM used as supported by the compute target. - :paramtype instance_type: str - :keyword locations: Locations where the job can run. - :paramtype locations: list[str] - :keyword max_instance_count: Optional max allowed number of instances or nodes to be used by - the compute target. - For use with elastic training, currently supported by PyTorch distribution type only. - :paramtype max_instance_count: int - :keyword properties: Additional properties bag. - :paramtype properties: dict[str, any] - :keyword docker_args: Extra arguments to pass to the Docker run command. This would override - any parameters that have already been set by the system, or in this section. This parameter is - only supported for Azure ML compute types. - :paramtype docker_args: str - :keyword shm_size: Size of the docker container's shared memory block. This should be in the - format of (number)(unit) where number as to be greater than 0 and the unit can be one of - b(bytes), k(kilobytes), m(megabytes), or g(gigabytes). - :paramtype shm_size: str - """ - super(JobResourceConfiguration, self).__init__(**kwargs) - self.docker_args = kwargs.get('docker_args', None) - self.shm_size = kwargs.get('shm_size', "2g") - - -class JobScheduleAction(ScheduleActionBase): - """JobScheduleAction. 
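JobResourceConfiguration (removed above) carries a client-side default: shm_size falls back to "2g" and is constrained to the pattern \d+[bBkKmMgG]. A sketch with an assumed VM SKU and Docker argument:

from azure.ai.ml._restclient.v2023_08_01_preview import models as ml_models  # assumed import path

job_resources = ml_models.JobResourceConfiguration(
    instance_count=2,                  # handled by the ResourceConfiguration base class
    instance_type="Standard_NC6s_v3",  # example SKU, not from this patch
    docker_args="--ipc=host",          # per the docstring, Azure ML compute types only
)
assert job_resources.shm_size == "2g"  # default from kwargs.get('shm_size', "2g") in the removed code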
- - All required parameters must be populated in order to send to Azure. - - :ivar action_type: Required. [Required] Specifies the action type of the schedule.Constant - filled by server. Possible values include: "CreateJob", "InvokeBatchEndpoint", "ImportData", - "CreateMonitor". - :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType - :ivar job_definition: Required. [Required] Defines Schedule action definition details. - :vartype job_definition: ~azure.mgmt.machinelearningservices.models.JobBaseProperties - """ - - _validation = { - 'action_type': {'required': True}, - 'job_definition': {'required': True}, - } - - _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, - 'job_definition': {'key': 'jobDefinition', 'type': 'JobBaseProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword job_definition: Required. [Required] Defines Schedule action definition details. - :paramtype job_definition: ~azure.mgmt.machinelearningservices.models.JobBaseProperties - """ - super(JobScheduleAction, self).__init__(**kwargs) - self.action_type = 'CreateJob' # type: str - self.job_definition = kwargs['job_definition'] - - -class JobService(msrest.serialization.Model): - """Job endpoint definition. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar endpoint: Url for endpoint. - :vartype endpoint: str - :ivar error_message: Any error in the service. - :vartype error_message: str - :ivar job_service_type: Endpoint type. - :vartype job_service_type: str - :ivar nodes: Nodes that user would like to start the service on. - If Nodes is not set or set to null, the service will only be started on leader node. - :vartype nodes: ~azure.mgmt.machinelearningservices.models.Nodes - :ivar port: Port for endpoint set by user. - :vartype port: int - :ivar properties: Additional properties to set on the endpoint. - :vartype properties: dict[str, str] - :ivar status: Status of endpoint. - :vartype status: str - """ - - _validation = { - 'error_message': {'readonly': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'job_service_type': {'key': 'jobServiceType', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': 'Nodes'}, - 'port': {'key': 'port', 'type': 'int'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword endpoint: Url for endpoint. - :paramtype endpoint: str - :keyword job_service_type: Endpoint type. - :paramtype job_service_type: str - :keyword nodes: Nodes that user would like to start the service on. - If Nodes is not set or set to null, the service will only be started on leader node. - :paramtype nodes: ~azure.mgmt.machinelearningservices.models.Nodes - :keyword port: Port for endpoint set by user. - :paramtype port: int - :keyword properties: Additional properties to set on the endpoint. - :paramtype properties: dict[str, str] - """ - super(JobService, self).__init__(**kwargs) - self.endpoint = kwargs.get('endpoint', None) - self.error_message = None - self.job_service_type = kwargs.get('job_service_type', None) - self.nodes = kwargs.get('nodes', None) - self.port = kwargs.get('port', None) - self.properties = kwargs.get('properties', None) - self.status = None - - -class KerberosCredentials(msrest.serialization.Model): - """KerberosCredentials. 
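JobService (removed above) mixes caller-settable fields with two read-only ones (error_message, status) that the removed __init__ unconditionally resets to None. A sketch with placeholder endpoint values:

from azure.ai.ml._restclient.v2023_08_01_preview import models as ml_models  # assumed import path

service = ml_models.JobService(
    endpoint="https://example.invalid/lab",  # placeholder URL
    job_service_type="JupyterLab",           # free-form endpoint type string; example value
    port=8888,
    properties={"purpose": "debugging"},
)
assert service.status is None  # read-only; only the service populates it on responses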
- - All required parameters must be populated in order to send to Azure. - - :ivar kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. - :vartype kerberos_kdc_address: str - :ivar kerberos_principal: Required. [Required] Kerberos Username. - :vartype kerberos_principal: str - :ivar kerberos_realm: Required. [Required] Domain over which a Kerberos authentication server - has the authority to authenticate a user, host or service. - :vartype kerberos_realm: str - """ - - _validation = { - 'kerberos_kdc_address': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_principal': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_realm': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'kerberos_kdc_address': {'key': 'kerberosKdcAddress', 'type': 'str'}, - 'kerberos_principal': {'key': 'kerberosPrincipal', 'type': 'str'}, - 'kerberos_realm': {'key': 'kerberosRealm', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. - :paramtype kerberos_kdc_address: str - :keyword kerberos_principal: Required. [Required] Kerberos Username. - :paramtype kerberos_principal: str - :keyword kerberos_realm: Required. [Required] Domain over which a Kerberos authentication - server has the authority to authenticate a user, host or service. - :paramtype kerberos_realm: str - """ - super(KerberosCredentials, self).__init__(**kwargs) - self.kerberos_kdc_address = kwargs['kerberos_kdc_address'] - self.kerberos_principal = kwargs['kerberos_principal'] - self.kerberos_realm = kwargs['kerberos_realm'] - - -class KerberosKeytabCredentials(DatastoreCredentials, KerberosCredentials): - """KerberosKeytabCredentials. - - All required parameters must be populated in order to send to Azure. - - :ivar kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. - :vartype kerberos_kdc_address: str - :ivar kerberos_principal: Required. [Required] Kerberos Username. - :vartype kerberos_principal: str - :ivar kerberos_realm: Required. [Required] Domain over which a Kerberos authentication server - has the authority to authenticate a user, host or service. - :vartype kerberos_realm: str - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". - :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType - :ivar secrets: Required. [Required] Keytab secrets. 
- :vartype secrets: ~azure.mgmt.machinelearningservices.models.KerberosKeytabSecrets - """ - - _validation = { - 'kerberos_kdc_address': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_principal': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_realm': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'credentials_type': {'required': True}, - 'secrets': {'required': True}, - } - - _attribute_map = { - 'kerberos_kdc_address': {'key': 'kerberosKdcAddress', 'type': 'str'}, - 'kerberos_principal': {'key': 'kerberosPrincipal', 'type': 'str'}, - 'kerberos_realm': {'key': 'kerberosRealm', 'type': 'str'}, - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'KerberosKeytabSecrets'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. - :paramtype kerberos_kdc_address: str - :keyword kerberos_principal: Required. [Required] Kerberos Username. - :paramtype kerberos_principal: str - :keyword kerberos_realm: Required. [Required] Domain over which a Kerberos authentication - server has the authority to authenticate a user, host or service. - :paramtype kerberos_realm: str - :keyword secrets: Required. [Required] Keytab secrets. - :paramtype secrets: ~azure.mgmt.machinelearningservices.models.KerberosKeytabSecrets - """ - super(KerberosKeytabCredentials, self).__init__(**kwargs) - self.kerberos_kdc_address = kwargs['kerberos_kdc_address'] - self.kerberos_principal = kwargs['kerberos_principal'] - self.kerberos_realm = kwargs['kerberos_realm'] - self.credentials_type = 'KerberosKeytab' # type: str - self.secrets = kwargs['secrets'] - - -class KerberosKeytabSecrets(DatastoreSecrets): - """KerberosKeytabSecrets. - - All required parameters must be populated in order to send to Azure. - - :ivar secrets_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", "Sas", - "ServicePrincipal", "KerberosPassword", "KerberosKeytab". - :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType - :ivar kerberos_keytab: Kerberos keytab secret. - :vartype kerberos_keytab: str - """ - - _validation = { - 'secrets_type': {'required': True}, - } - - _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'kerberos_keytab': {'key': 'kerberosKeytab', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword kerberos_keytab: Kerberos keytab secret. - :paramtype kerberos_keytab: str - """ - super(KerberosKeytabSecrets, self).__init__(**kwargs) - self.secrets_type = 'KerberosKeytab' # type: str - self.kerberos_keytab = kwargs.get('kerberos_keytab', None) - - -class KerberosPasswordCredentials(DatastoreCredentials, KerberosCredentials): - """KerberosPasswordCredentials. - - All required parameters must be populated in order to send to Azure. - - :ivar kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. - :vartype kerberos_kdc_address: str - :ivar kerberos_principal: Required. [Required] Kerberos Username. - :vartype kerberos_principal: str - :ivar kerberos_realm: Required. [Required] Domain over which a Kerberos authentication server - has the authority to authenticate a user, host or service. - :vartype kerberos_realm: str - :ivar credentials_type: Required. 
[Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". - :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType - :ivar secrets: Required. [Required] Kerberos password secrets. - :vartype secrets: ~azure.mgmt.machinelearningservices.models.KerberosPasswordSecrets - """ - - _validation = { - 'kerberos_kdc_address': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_principal': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_realm': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'credentials_type': {'required': True}, - 'secrets': {'required': True}, - } - - _attribute_map = { - 'kerberos_kdc_address': {'key': 'kerberosKdcAddress', 'type': 'str'}, - 'kerberos_principal': {'key': 'kerberosPrincipal', 'type': 'str'}, - 'kerberos_realm': {'key': 'kerberosRealm', 'type': 'str'}, - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'KerberosPasswordSecrets'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. - :paramtype kerberos_kdc_address: str - :keyword kerberos_principal: Required. [Required] Kerberos Username. - :paramtype kerberos_principal: str - :keyword kerberos_realm: Required. [Required] Domain over which a Kerberos authentication - server has the authority to authenticate a user, host or service. - :paramtype kerberos_realm: str - :keyword secrets: Required. [Required] Kerberos password secrets. - :paramtype secrets: ~azure.mgmt.machinelearningservices.models.KerberosPasswordSecrets - """ - super(KerberosPasswordCredentials, self).__init__(**kwargs) - self.kerberos_kdc_address = kwargs['kerberos_kdc_address'] - self.kerberos_principal = kwargs['kerberos_principal'] - self.kerberos_realm = kwargs['kerberos_realm'] - self.credentials_type = 'KerberosPassword' # type: str - self.secrets = kwargs['secrets'] - - -class KerberosPasswordSecrets(DatastoreSecrets): - """KerberosPasswordSecrets. - - All required parameters must be populated in order to send to Azure. - - :ivar secrets_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", "Sas", - "ServicePrincipal", "KerberosPassword", "KerberosKeytab". - :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType - :ivar kerberos_password: Kerberos password secret. - :vartype kerberos_password: str - """ - - _validation = { - 'secrets_type': {'required': True}, - } - - _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'kerberos_password': {'key': 'kerberosPassword', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword kerberos_password: Kerberos password secret. - :paramtype kerberos_password: str - """ - super(KerberosPasswordSecrets, self).__init__(**kwargs) - self.secrets_type = 'KerberosPassword' # type: str - self.kerberos_password = kwargs.get('kerberos_password', None) - - -class KeyVaultProperties(msrest.serialization.Model): - """Customer Key vault properties. - - All required parameters must be populated in order to send to Azure. - - :ivar identity_client_id: Currently, we support only SystemAssigned MSI. 
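The Kerberos* datastore credential models above are polymorphic on credentials_type / secrets_type, and the removed constructors pin those discriminators ('KerberosPassword', 'KerberosKeytab') so callers never pass them. A sketch for the password variant, with placeholder realm details:

from azure.ai.ml._restclient.v2023_08_01_preview import models as ml_models  # assumed import path

creds = ml_models.KerberosPasswordCredentials(
    kerberos_kdc_address="kdc.contoso.example",  # placeholder host
    kerberos_principal="svc-ml",
    kerberos_realm="CONTOSO.EXAMPLE",
    secrets=ml_models.KerberosPasswordSecrets(kerberos_password="<redacted>"),
)
assert creds.credentials_type == "KerberosPassword"      # set by the removed __init__
assert creds.secrets.secrets_type == "KerberosPassword"  # likewise on the secrets model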
- We need this when we support UserAssignedIdentities. - :vartype identity_client_id: str - :ivar key_identifier: Required. KeyVault key identifier to encrypt the data. - :vartype key_identifier: str - :ivar key_vault_arm_id: Required. KeyVault Arm Id that contains the data encryption key. - :vartype key_vault_arm_id: str - """ - - _validation = { - 'key_identifier': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'key_vault_arm_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'identity_client_id': {'key': 'identityClientId', 'type': 'str'}, - 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, - 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword identity_client_id: Currently, we support only SystemAssigned MSI. - We need this when we support UserAssignedIdentities. - :paramtype identity_client_id: str - :keyword key_identifier: Required. KeyVault key identifier to encrypt the data. - :paramtype key_identifier: str - :keyword key_vault_arm_id: Required. KeyVault Arm Id that contains the data encryption key. - :paramtype key_vault_arm_id: str - """ - super(KeyVaultProperties, self).__init__(**kwargs) - self.identity_client_id = kwargs.get('identity_client_id', None) - self.key_identifier = kwargs['key_identifier'] - self.key_vault_arm_id = kwargs['key_vault_arm_id'] - - -class KubernetesSchema(msrest.serialization.Model): - """Kubernetes Compute Schema. - - :ivar properties: Properties of Kubernetes. - :vartype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties - """ - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'KubernetesProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Properties of Kubernetes. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties - """ - super(KubernetesSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class Kubernetes(Compute, KubernetesSchema): - """A Machine Learning compute based on Kubernetes Compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar properties: Properties of Kubernetes. - :vartype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar compute_location: Location for the underlying compute. - :vartype compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar description: The description of the Machine Learning compute. - :vartype description: str - :ivar created_on: The time at which the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The time at which the compute was last modified. 
- :vartype modified_on: ~datetime.datetime - :ivar resource_id: ARM resource id of the underlying compute. - :vartype resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI - and AAD exclusively for authentication. - :vartype disable_local_auth: bool - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'KubernetesProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Properties of Kubernetes. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties - :keyword compute_location: Location for the underlying compute. - :paramtype compute_location: str - :keyword description: The description of the Machine Learning compute. - :paramtype description: str - :keyword resource_id: ARM resource id of the underlying compute. - :paramtype resource_id: str - :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only - MSI and AAD exclusively for authentication. - :paramtype disable_local_auth: bool - """ - super(Kubernetes, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'Kubernetes' # type: str - self.compute_location = kwargs.get('compute_location', None) - self.provisioning_state = None - self.description = kwargs.get('description', None) - self.created_on = None - self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) - self.provisioning_errors = None - self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) - - -class OnlineDeploymentProperties(EndpointDeploymentPropertiesBase): - """OnlineDeploymentProperties. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: KubernetesOnlineDeployment, ManagedOnlineDeployment. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar code_configuration: Code configuration for the endpoint deployment. - :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :ivar description: Description of the endpoint deployment. 
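Kubernetes (the compute model removed above) merges KubernetesSchema with the Compute envelope: compute_type is fixed to 'Kubernetes' and the provisioning fields are server-populated. A sketch; the resource ID shape and property values are assumptions, and the KubernetesProperties fields used here appear further down in this same hunk:

from azure.ai.ml._restclient.v2023_08_01_preview import models as ml_models  # assumed import path

k8s_compute = ml_models.Kubernetes(
    description="Attached AKS cluster",
    resource_id=(
        "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
        "Microsoft.ContainerService/managedClusters/<cluster>"  # placeholder ARM id
    ),
    properties=ml_models.KubernetesProperties(
        namespace="azureml",                # falls back to "default" when omitted
        default_instance_type="gpu-small",  # example name, not from this patch
    ),
    disable_local_auth=True,
)
assert k8s_compute.compute_type == "Kubernetes"
assert k8s_compute.provisioning_state is None  # read-only until the service fills it in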
- :vartype description: str - :ivar environment_id: ARM resource ID of the environment specification for the endpoint - deployment. - :vartype environment_id: str - :ivar environment_variables: Environment variables configuration for the deployment. - :vartype environment_variables: dict[str, str] - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar app_insights_enabled: If true, enables Application Insights logging. - :vartype app_insights_enabled: bool - :ivar data_collector: The mdc configuration, we disable mdc when it's null. - :vartype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector - :ivar egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Possible values include: "Enabled", - "Disabled". - :vartype egress_public_network_access: str or - ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :ivar endpoint_compute_type: Required. [Required] The compute type of the endpoint.Constant - filled by server. Possible values include: "Managed", "Kubernetes", "AzureMLCompute". - :vartype endpoint_compute_type: str or - ~azure.mgmt.machinelearningservices.models.EndpointComputeType - :ivar instance_type: Compute instance type. - :vartype instance_type: str - :ivar liveness_probe: Liveness probe monitors the health of the container regularly. - :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :ivar model: The URI path to the model. - :vartype model: str - :ivar model_mount_path: The path to mount the model in custom container. - :vartype model_mount_path: str - :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values - include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState - :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. - The properties and defaults are the same as liveness probe. - :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :ivar request_settings: Request settings for the deployment. - :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings - :ivar scale_settings: Scale settings for the deployment. - If it is null or not provided, - it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment - and to DefaultScaleSettings for ManagedOnlineDeployment. 
- :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings - """ - - _validation = { - 'endpoint_compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, - 'data_collector': {'key': 'dataCollector', 'type': 'DataCollector'}, - 'egress_public_network_access': {'key': 'egressPublicNetworkAccess', 'type': 'str'}, - 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, - 'model': {'key': 'model', 'type': 'str'}, - 'model_mount_path': {'key': 'modelMountPath', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, - 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, - } - - _subtype_map = { - 'endpoint_compute_type': {'Kubernetes': 'KubernetesOnlineDeployment', 'Managed': 'ManagedOnlineDeployment'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword code_configuration: Code configuration for the endpoint deployment. - :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :keyword description: Description of the endpoint deployment. - :paramtype description: str - :keyword environment_id: ARM resource ID of the environment specification for the endpoint - deployment. - :paramtype environment_id: str - :keyword environment_variables: Environment variables configuration for the deployment. - :paramtype environment_variables: dict[str, str] - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword app_insights_enabled: If true, enables Application Insights logging. - :paramtype app_insights_enabled: bool - :keyword data_collector: The mdc configuration, we disable mdc when it's null. - :paramtype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector - :keyword egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Possible values include: "Enabled", - "Disabled". - :paramtype egress_public_network_access: str or - ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :keyword instance_type: Compute instance type. - :paramtype instance_type: str - :keyword liveness_probe: Liveness probe monitors the health of the container regularly. - :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :keyword model: The URI path to the model. - :paramtype model: str - :keyword model_mount_path: The path to mount the model in custom container. - :paramtype model_mount_path: str - :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. - The properties and defaults are the same as liveness probe. 
- :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :keyword request_settings: Request settings for the deployment. - :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings - :keyword scale_settings: Scale settings for the deployment. - If it is null or not provided, - it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment - and to DefaultScaleSettings for ManagedOnlineDeployment. - :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings - """ - super(OnlineDeploymentProperties, self).__init__(**kwargs) - self.app_insights_enabled = kwargs.get('app_insights_enabled', False) - self.data_collector = kwargs.get('data_collector', None) - self.egress_public_network_access = kwargs.get('egress_public_network_access', None) - self.endpoint_compute_type = 'OnlineDeploymentProperties' # type: str - self.instance_type = kwargs.get('instance_type', None) - self.liveness_probe = kwargs.get('liveness_probe', None) - self.model = kwargs.get('model', None) - self.model_mount_path = kwargs.get('model_mount_path', None) - self.provisioning_state = None - self.readiness_probe = kwargs.get('readiness_probe', None) - self.request_settings = kwargs.get('request_settings', None) - self.scale_settings = kwargs.get('scale_settings', None) - - -class KubernetesOnlineDeployment(OnlineDeploymentProperties): - """Properties specific to a KubernetesOnlineDeployment. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar code_configuration: Code configuration for the endpoint deployment. - :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :ivar description: Description of the endpoint deployment. - :vartype description: str - :ivar environment_id: ARM resource ID of the environment specification for the endpoint - deployment. - :vartype environment_id: str - :ivar environment_variables: Environment variables configuration for the deployment. - :vartype environment_variables: dict[str, str] - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar app_insights_enabled: If true, enables Application Insights logging. - :vartype app_insights_enabled: bool - :ivar data_collector: The mdc configuration, we disable mdc when it's null. - :vartype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector - :ivar egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Possible values include: "Enabled", - "Disabled". - :vartype egress_public_network_access: str or - ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :ivar endpoint_compute_type: Required. [Required] The compute type of the endpoint.Constant - filled by server. Possible values include: "Managed", "Kubernetes", "AzureMLCompute". - :vartype endpoint_compute_type: str or - ~azure.mgmt.machinelearningservices.models.EndpointComputeType - :ivar instance_type: Compute instance type. - :vartype instance_type: str - :ivar liveness_probe: Liveness probe monitors the health of the container regularly. - :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :ivar model: The URI path to the model. 
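The _subtype_map on the removed OnlineDeploymentProperties base is what routes wire payloads to KubernetesOnlineDeployment or ManagedOnlineDeployment by their endpointComputeType discriminator. A hedged round-trip sketch, assuming standard msrest Model.deserialize behavior and an illustrative payload:

from azure.ai.ml._restclient.v2023_08_01_preview import models as ml_models  # assumed import path

payload = {
    "endpointComputeType": "Kubernetes",  # discriminator from the _subtype_map above
    "model": "azureml:my-model:1",        # example model reference, not from this patch
    "appInsightsEnabled": False,
}
deployment = ml_models.OnlineDeploymentProperties.deserialize(payload)
# With msrest's polymorphic handling this should come back as the subtype:
print(type(deployment).__name__)  # expected: KubernetesOnlineDeployment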
- :vartype model: str - :ivar model_mount_path: The path to mount the model in custom container. - :vartype model_mount_path: str - :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values - include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState - :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. - The properties and defaults are the same as liveness probe. - :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :ivar request_settings: Request settings for the deployment. - :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings - :ivar scale_settings: Scale settings for the deployment. - If it is null or not provided, - it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment - and to DefaultScaleSettings for ManagedOnlineDeployment. - :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings - :ivar container_resource_requirements: The resource requirements for the container (cpu and - memory). - :vartype container_resource_requirements: - ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements - """ - - _validation = { - 'endpoint_compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, - 'data_collector': {'key': 'dataCollector', 'type': 'DataCollector'}, - 'egress_public_network_access': {'key': 'egressPublicNetworkAccess', 'type': 'str'}, - 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, - 'model': {'key': 'model', 'type': 'str'}, - 'model_mount_path': {'key': 'modelMountPath', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, - 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, - 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword code_configuration: Code configuration for the endpoint deployment. - :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :keyword description: Description of the endpoint deployment. - :paramtype description: str - :keyword environment_id: ARM resource ID of the environment specification for the endpoint - deployment. - :paramtype environment_id: str - :keyword environment_variables: Environment variables configuration for the deployment. - :paramtype environment_variables: dict[str, str] - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. 
- :paramtype properties: dict[str, str] - :keyword app_insights_enabled: If true, enables Application Insights logging. - :paramtype app_insights_enabled: bool - :keyword data_collector: The mdc configuration, we disable mdc when it's null. - :paramtype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector - :keyword egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Possible values include: "Enabled", - "Disabled". - :paramtype egress_public_network_access: str or - ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :keyword instance_type: Compute instance type. - :paramtype instance_type: str - :keyword liveness_probe: Liveness probe monitors the health of the container regularly. - :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :keyword model: The URI path to the model. - :paramtype model: str - :keyword model_mount_path: The path to mount the model in custom container. - :paramtype model_mount_path: str - :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. - The properties and defaults are the same as liveness probe. - :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :keyword request_settings: Request settings for the deployment. - :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings - :keyword scale_settings: Scale settings for the deployment. - If it is null or not provided, - it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment - and to DefaultScaleSettings for ManagedOnlineDeployment. - :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings - :keyword container_resource_requirements: The resource requirements for the container (cpu and - memory). - :paramtype container_resource_requirements: - ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements - """ - super(KubernetesOnlineDeployment, self).__init__(**kwargs) - self.endpoint_compute_type = 'Kubernetes' # type: str - self.container_resource_requirements = kwargs.get('container_resource_requirements', None) - - -class KubernetesProperties(msrest.serialization.Model): - """Kubernetes properties. - - :ivar relay_connection_string: Relay connection string. - :vartype relay_connection_string: str - :ivar service_bus_connection_string: ServiceBus connection string. - :vartype service_bus_connection_string: str - :ivar extension_principal_id: Extension principal-id. - :vartype extension_principal_id: str - :ivar extension_instance_release_train: Extension instance release train. - :vartype extension_instance_release_train: str - :ivar vc_name: VC name. - :vartype vc_name: str - :ivar namespace: Compute namespace. - :vartype namespace: str - :ivar default_instance_type: Default instance type. - :vartype default_instance_type: str - :ivar instance_types: Instance Type Schema. 
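KubernetesOnlineDeployment itself only adds container_resource_requirements on top of the base deployment fields. A short construction sketch using only fields visible in this hunk, with illustrative values:

from azure.ai.ml._restclient.v2023_08_01_preview import models as ml_models  # assumed import path

blue = ml_models.KubernetesOnlineDeployment(
    description="blue",
    instance_type="gpu-small",   # example instance type name, not from this patch
    model="azureml:my-model:1",  # example model reference, not from this patch
    app_insights_enabled=True,
)
assert blue.endpoint_compute_type == "Kubernetes"  # pinned by the removed __init__
# scale_settings is left None here; per the docstring above the service then
# defaults it to TargetUtilizationScaleSettings for Kubernetes deployments.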
- :vartype instance_types: dict[str, - ~azure.mgmt.machinelearningservices.models.InstanceTypeSchema] - """ - - _attribute_map = { - 'relay_connection_string': {'key': 'relayConnectionString', 'type': 'str'}, - 'service_bus_connection_string': {'key': 'serviceBusConnectionString', 'type': 'str'}, - 'extension_principal_id': {'key': 'extensionPrincipalId', 'type': 'str'}, - 'extension_instance_release_train': {'key': 'extensionInstanceReleaseTrain', 'type': 'str'}, - 'vc_name': {'key': 'vcName', 'type': 'str'}, - 'namespace': {'key': 'namespace', 'type': 'str'}, - 'default_instance_type': {'key': 'defaultInstanceType', 'type': 'str'}, - 'instance_types': {'key': 'instanceTypes', 'type': '{InstanceTypeSchema}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword relay_connection_string: Relay connection string. - :paramtype relay_connection_string: str - :keyword service_bus_connection_string: ServiceBus connection string. - :paramtype service_bus_connection_string: str - :keyword extension_principal_id: Extension principal-id. - :paramtype extension_principal_id: str - :keyword extension_instance_release_train: Extension instance release train. - :paramtype extension_instance_release_train: str - :keyword vc_name: VC name. - :paramtype vc_name: str - :keyword namespace: Compute namespace. - :paramtype namespace: str - :keyword default_instance_type: Default instance type. - :paramtype default_instance_type: str - :keyword instance_types: Instance Type Schema. - :paramtype instance_types: dict[str, - ~azure.mgmt.machinelearningservices.models.InstanceTypeSchema] - """ - super(KubernetesProperties, self).__init__(**kwargs) - self.relay_connection_string = kwargs.get('relay_connection_string', None) - self.service_bus_connection_string = kwargs.get('service_bus_connection_string', None) - self.extension_principal_id = kwargs.get('extension_principal_id', None) - self.extension_instance_release_train = kwargs.get('extension_instance_release_train', None) - self.vc_name = kwargs.get('vc_name', None) - self.namespace = kwargs.get('namespace', "default") - self.default_instance_type = kwargs.get('default_instance_type', None) - self.instance_types = kwargs.get('instance_types', None) - - -class LabelCategory(msrest.serialization.Model): - """Label category definition. - - :ivar classes: Dictionary of label classes in this category. - :vartype classes: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] - :ivar display_name: Display name of the label category. - :vartype display_name: str - :ivar multi_select: Indicates whether it is allowed to select multiple classes in this - category. Possible values include: "Enabled", "Disabled". - :vartype multi_select: str or ~azure.mgmt.machinelearningservices.models.MultiSelect - """ - - _attribute_map = { - 'classes': {'key': 'classes', 'type': '{LabelClass}'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'multi_select': {'key': 'multiSelect', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword classes: Dictionary of label classes in this category. - :paramtype classes: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] - :keyword display_name: Display name of the label category. - :paramtype display_name: str - :keyword multi_select: Indicates whether it is allowed to select multiple classes in this - category. Possible values include: "Enabled", "Disabled". 
- :paramtype multi_select: str or ~azure.mgmt.machinelearningservices.models.MultiSelect - """ - super(LabelCategory, self).__init__(**kwargs) - self.classes = kwargs.get('classes', None) - self.display_name = kwargs.get('display_name', None) - self.multi_select = kwargs.get('multi_select', None) - - -class LabelClass(msrest.serialization.Model): - """Label class definition. - - :ivar display_name: Display name of the label class. - :vartype display_name: str - :ivar subclasses: Dictionary of subclasses of the label class. - :vartype subclasses: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] - """ - - _attribute_map = { - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'subclasses': {'key': 'subclasses', 'type': '{LabelClass}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword display_name: Display name of the label class. - :paramtype display_name: str - :keyword subclasses: Dictionary of subclasses of the label class. - :paramtype subclasses: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] - """ - super(LabelClass, self).__init__(**kwargs) - self.display_name = kwargs.get('display_name', None) - self.subclasses = kwargs.get('subclasses', None) - - -class LabelingDataConfiguration(msrest.serialization.Model): - """Labeling data configuration definition. - - :ivar data_id: Resource Id of the data asset to perform labeling. - :vartype data_id: str - :ivar incremental_data_refresh: Indicates whether to enable incremental data refresh. Possible - values include: "Enabled", "Disabled". - :vartype incremental_data_refresh: str or - ~azure.mgmt.machinelearningservices.models.IncrementalDataRefresh - """ - - _attribute_map = { - 'data_id': {'key': 'dataId', 'type': 'str'}, - 'incremental_data_refresh': {'key': 'incrementalDataRefresh', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword data_id: Resource Id of the data asset to perform labeling. - :paramtype data_id: str - :keyword incremental_data_refresh: Indicates whether to enable incremental data refresh. - Possible values include: "Enabled", "Disabled". - :paramtype incremental_data_refresh: str or - ~azure.mgmt.machinelearningservices.models.IncrementalDataRefresh - """ - super(LabelingDataConfiguration, self).__init__(**kwargs) - self.data_id = kwargs.get('data_id', None) - self.incremental_data_refresh = kwargs.get('incremental_data_refresh', None) - - -class LabelingJob(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. 
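# --- Editor's illustrative sketch (not part of the patch): LabelCategory and LabelClass above
# nest through plain dictionaries, so a small labeling taxonomy plus its data configuration can
# be assembled like this. Import path assumed from the diffstat; all names and IDs below are
# placeholders for illustration only.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models  # assumed path

label_categories = {
    "animal": _models.LabelCategory(
        display_name="Animal",
        multi_select="Disabled",
        classes={
            "cat": _models.LabelClass(display_name="Cat"),
            "dog": _models.LabelClass(
                display_name="Dog",
                subclasses={"puppy": _models.LabelClass(display_name="Puppy")},
            ),
        },
    )
}
data_configuration = _models.LabelingDataConfiguration(
    data_id="azureml:my-images:1",              # placeholder data asset id
    incremental_data_refresh="Enabled",
)
# --- end sketch ---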
- :vartype properties: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'LabelingJobProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties - """ - super(LabelingJob, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class LabelingJobMediaProperties(msrest.serialization.Model): - """Properties of a labeling job. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: LabelingJobImageProperties, LabelingJobTextProperties. - - All required parameters must be populated in order to send to Azure. - - :ivar media_type: Required. [Required] Media type of the job.Constant filled by server. - Possible values include: "Image", "Text". - :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType - """ - - _validation = { - 'media_type': {'required': True}, - } - - _attribute_map = { - 'media_type': {'key': 'mediaType', 'type': 'str'}, - } - - _subtype_map = { - 'media_type': {'Image': 'LabelingJobImageProperties', 'Text': 'LabelingJobTextProperties'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(LabelingJobMediaProperties, self).__init__(**kwargs) - self.media_type = None # type: Optional[str] - - -class LabelingJobImageProperties(LabelingJobMediaProperties): - """Properties of a labeling job for image data. - - All required parameters must be populated in order to send to Azure. - - :ivar media_type: Required. [Required] Media type of the job.Constant filled by server. - Possible values include: "Image", "Text". - :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType - :ivar annotation_type: Annotation type of image labeling job. Possible values include: - "Classification", "BoundingBox", "InstanceSegmentation". - :vartype annotation_type: str or ~azure.mgmt.machinelearningservices.models.ImageAnnotationType - """ - - _validation = { - 'media_type': {'required': True}, - } - - _attribute_map = { - 'media_type': {'key': 'mediaType', 'type': 'str'}, - 'annotation_type': {'key': 'annotationType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword annotation_type: Annotation type of image labeling job. Possible values include: - "Classification", "BoundingBox", "InstanceSegmentation". - :paramtype annotation_type: str or - ~azure.mgmt.machinelearningservices.models.ImageAnnotationType - """ - super(LabelingJobImageProperties, self).__init__(**kwargs) - self.media_type = 'Image' # type: str - self.annotation_type = kwargs.get('annotation_type', None) - - -class LabelingJobInstructions(msrest.serialization.Model): - """Instructions for labeling job. - - :ivar uri: The link to a page with detailed labeling instructions for labelers. 
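# --- Editor's illustrative sketch (not part of the patch): LabelingJobMediaProperties above is
# a polymorphic base; its _subtype_map keys the mediaType discriminator to the Image/Text
# subclasses, so callers build the concrete subclass and the constant is filled in for them.
# Import path assumed from the diffstat.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models  # assumed path

image_props = _models.LabelingJobImageProperties(annotation_type="BoundingBox")
assert image_props.media_type == "Image"   # constant set by the subclass constructor
# On deserialization, msrest reads "mediaType" from the payload and dispatches through
# _subtype_map to LabelingJobImageProperties or LabelingJobTextProperties.
# --- end sketch ---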
- :vartype uri: str - """ - - _attribute_map = { - 'uri': {'key': 'uri', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword uri: The link to a page with detailed labeling instructions for labelers. - :paramtype uri: str - """ - super(LabelingJobInstructions, self).__init__(**kwargs) - self.uri = kwargs.get('uri', None) - - -class LabelingJobProperties(JobBaseProperties): - """Labeling job definition. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar component_id: ARM resource ID of the component resource. - :vartype component_id: str - :ivar compute_id: ARM resource ID of the compute resource. - :vartype compute_id: str - :ivar display_name: Display name of job. - :vartype display_name: str - :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. - :vartype experiment_name: str - :ivar identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. - :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". - :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType - :ivar notification_setting: Notification setting for the job. - :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :ivar secrets_configuration: Configuration for secrets to be made available during runtime. - :vartype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :ivar services: List of JobEndpoints. - For local jobs, a job endpoint will have an endpoint value of FileStreamObject. - :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus - :ivar created_date_time: Created time of the job in UTC timezone. - :vartype created_date_time: ~datetime.datetime - :ivar data_configuration: Configuration of data used in the job. - :vartype data_configuration: - ~azure.mgmt.machinelearningservices.models.LabelingDataConfiguration - :ivar job_instructions: Labeling instructions of the job. - :vartype job_instructions: ~azure.mgmt.machinelearningservices.models.LabelingJobInstructions - :ivar label_categories: Label categories of the job. - :vartype label_categories: dict[str, ~azure.mgmt.machinelearningservices.models.LabelCategory] - :ivar labeling_job_media_properties: Media type specific properties in the job. 
- :vartype labeling_job_media_properties: - ~azure.mgmt.machinelearningservices.models.LabelingJobMediaProperties - :ivar ml_assist_configuration: Configuration of MLAssist feature in the job. - :vartype ml_assist_configuration: - ~azure.mgmt.machinelearningservices.models.MLAssistConfiguration - :ivar progress_metrics: Progress metrics of the job. - :vartype progress_metrics: ~azure.mgmt.machinelearningservices.models.ProgressMetrics - :ivar project_id: Internal id of the job(Previously called project). - :vartype project_id: str - :ivar provisioning_state: Specifies the labeling job provisioning state. Possible values - include: "Succeeded", "Failed", "Canceled", "InProgress". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.JobProvisioningState - :ivar status_messages: Status messages of the job. - :vartype status_messages: list[~azure.mgmt.machinelearningservices.models.StatusMessage] - """ - - _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'created_date_time': {'readonly': True}, - 'progress_metrics': {'readonly': True}, - 'project_id': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'status_messages': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'}, - 'data_configuration': {'key': 'dataConfiguration', 'type': 'LabelingDataConfiguration'}, - 'job_instructions': {'key': 'jobInstructions', 'type': 'LabelingJobInstructions'}, - 'label_categories': {'key': 'labelCategories', 'type': '{LabelCategory}'}, - 'labeling_job_media_properties': {'key': 'labelingJobMediaProperties', 'type': 'LabelingJobMediaProperties'}, - 'ml_assist_configuration': {'key': 'mlAssistConfiguration', 'type': 'MLAssistConfiguration'}, - 'progress_metrics': {'key': 'progressMetrics', 'type': 'ProgressMetrics'}, - 'project_id': {'key': 'projectId', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'status_messages': {'key': 'statusMessages', 'type': '[StatusMessage]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword component_id: ARM resource ID of the component resource. - :paramtype component_id: str - :keyword compute_id: ARM resource ID of the compute resource. - :paramtype compute_id: str - :keyword display_name: Display name of job. 
- :paramtype display_name: str - :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. - :paramtype experiment_name: str - :keyword identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. - :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - :keyword notification_setting: Notification setting for the job. - :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :keyword secrets_configuration: Configuration for secrets to be made available during runtime. - :paramtype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :keyword services: List of JobEndpoints. - For local jobs, a job endpoint will have an endpoint value of FileStreamObject. - :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :keyword data_configuration: Configuration of data used in the job. - :paramtype data_configuration: - ~azure.mgmt.machinelearningservices.models.LabelingDataConfiguration - :keyword job_instructions: Labeling instructions of the job. - :paramtype job_instructions: ~azure.mgmt.machinelearningservices.models.LabelingJobInstructions - :keyword label_categories: Label categories of the job. - :paramtype label_categories: dict[str, - ~azure.mgmt.machinelearningservices.models.LabelCategory] - :keyword labeling_job_media_properties: Media type specific properties in the job. - :paramtype labeling_job_media_properties: - ~azure.mgmt.machinelearningservices.models.LabelingJobMediaProperties - :keyword ml_assist_configuration: Configuration of MLAssist feature in the job. - :paramtype ml_assist_configuration: - ~azure.mgmt.machinelearningservices.models.MLAssistConfiguration - """ - super(LabelingJobProperties, self).__init__(**kwargs) - self.job_type = 'Labeling' # type: str - self.created_date_time = None - self.data_configuration = kwargs.get('data_configuration', None) - self.job_instructions = kwargs.get('job_instructions', None) - self.label_categories = kwargs.get('label_categories', None) - self.labeling_job_media_properties = kwargs.get('labeling_job_media_properties', None) - self.ml_assist_configuration = kwargs.get('ml_assist_configuration', None) - self.progress_metrics = None - self.project_id = None - self.provisioning_state = None - self.status_messages = None - - -class LabelingJobResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of LabelingJob entities. - - :ivar next_link: The link to the next page of LabelingJob objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type LabelingJob. - :vartype value: list[~azure.mgmt.machinelearningservices.models.LabelingJob] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[LabelingJob]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of LabelingJob objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type LabelingJob. 
- :paramtype value: list[~azure.mgmt.machinelearningservices.models.LabelingJob] - """ - super(LabelingJobResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class LabelingJobTextProperties(LabelingJobMediaProperties): - """Properties of a labeling job for text data. - - All required parameters must be populated in order to send to Azure. - - :ivar media_type: Required. [Required] Media type of the job.Constant filled by server. - Possible values include: "Image", "Text". - :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType - :ivar annotation_type: Annotation type of text labeling job. Possible values include: - "Classification", "NamedEntityRecognition". - :vartype annotation_type: str or ~azure.mgmt.machinelearningservices.models.TextAnnotationType - """ - - _validation = { - 'media_type': {'required': True}, - } - - _attribute_map = { - 'media_type': {'key': 'mediaType', 'type': 'str'}, - 'annotation_type': {'key': 'annotationType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword annotation_type: Annotation type of text labeling job. Possible values include: - "Classification", "NamedEntityRecognition". - :paramtype annotation_type: str or - ~azure.mgmt.machinelearningservices.models.TextAnnotationType - """ - super(LabelingJobTextProperties, self).__init__(**kwargs) - self.media_type = 'Text' # type: str - self.annotation_type = kwargs.get('annotation_type', None) - - -class OneLakeArtifact(msrest.serialization.Model): - """OneLake artifact (data source) configuration. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: LakeHouseArtifact. - - All required parameters must be populated in order to send to Azure. - - :ivar artifact_name: Required. [Required] OneLake artifact name. - :vartype artifact_name: str - :ivar artifact_type: Required. [Required] OneLake artifact type.Constant filled by server. - Possible values include: "LakeHouse". - :vartype artifact_type: str or ~azure.mgmt.machinelearningservices.models.OneLakeArtifactType - """ - - _validation = { - 'artifact_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'artifact_type': {'required': True}, - } - - _attribute_map = { - 'artifact_name': {'key': 'artifactName', 'type': 'str'}, - 'artifact_type': {'key': 'artifactType', 'type': 'str'}, - } - - _subtype_map = { - 'artifact_type': {'LakeHouse': 'LakeHouseArtifact'} - } - - def __init__( - self, - **kwargs - ): - """ - :keyword artifact_name: Required. [Required] OneLake artifact name. - :paramtype artifact_name: str - """ - super(OneLakeArtifact, self).__init__(**kwargs) - self.artifact_name = kwargs['artifact_name'] - self.artifact_type = None # type: Optional[str] - - -class LakeHouseArtifact(OneLakeArtifact): - """LakeHouseArtifact. - - All required parameters must be populated in order to send to Azure. - - :ivar artifact_name: Required. [Required] OneLake artifact name. - :vartype artifact_name: str - :ivar artifact_type: Required. [Required] OneLake artifact type.Constant filled by server. - Possible values include: "LakeHouse". 
- :vartype artifact_type: str or ~azure.mgmt.machinelearningservices.models.OneLakeArtifactType - """ - - _validation = { - 'artifact_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'artifact_type': {'required': True}, - } - - _attribute_map = { - 'artifact_name': {'key': 'artifactName', 'type': 'str'}, - 'artifact_type': {'key': 'artifactType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword artifact_name: Required. [Required] OneLake artifact name. - :paramtype artifact_name: str - """ - super(LakeHouseArtifact, self).__init__(**kwargs) - self.artifact_type = 'LakeHouse' # type: str - - -class ListAmlUserFeatureResult(msrest.serialization.Model): - """The List Aml user feature operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of AML user facing features. - :vartype value: list[~azure.mgmt.machinelearningservices.models.AmlUserFeature] - :ivar next_link: The URI to fetch the next page of AML user features information. Call - ListNext() with this to fetch the next page of AML user features information. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[AmlUserFeature]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ListAmlUserFeatureResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class ListNotebookKeysResult(msrest.serialization.Model): - """ListNotebookKeysResult. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar primary_access_key: The primary access key of the Notebook. - :vartype primary_access_key: str - :ivar secondary_access_key: The secondary access key of the Notebook. - :vartype secondary_access_key: str - """ - - _validation = { - 'primary_access_key': {'readonly': True}, - 'secondary_access_key': {'readonly': True}, - } - - _attribute_map = { - 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'}, - 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ListNotebookKeysResult, self).__init__(**kwargs) - self.primary_access_key = None - self.secondary_access_key = None - - -class ListStorageAccountKeysResult(msrest.serialization.Model): - """ListStorageAccountKeysResult. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar user_storage_key: The access key of the storage. - :vartype user_storage_key: str - """ - - _validation = { - 'user_storage_key': {'readonly': True}, - } - - _attribute_map = { - 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ListStorageAccountKeysResult, self).__init__(**kwargs) - self.user_storage_key = None - - -class ListUsagesResult(msrest.serialization.Model): - """The List Usages operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of AML resource usages. - :vartype value: list[~azure.mgmt.machinelearningservices.models.Usage] - :ivar next_link: The URI to fetch the next page of AML resource usage information. Call - ListNext() with this to fetch the next page of AML resource usage information. 
- :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[Usage]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ListUsagesResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class ListWorkspaceKeysResult(msrest.serialization.Model): - """ListWorkspaceKeysResult. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar app_insights_instrumentation_key: The access key of the workspace app insights. - :vartype app_insights_instrumentation_key: str - :ivar container_registry_credentials: - :vartype container_registry_credentials: - ~azure.mgmt.machinelearningservices.models.RegistryListCredentialsResult - :ivar notebook_access_keys: - :vartype notebook_access_keys: - ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult - :ivar user_storage_arm_id: The arm Id key of the workspace storage. - :vartype user_storage_arm_id: str - :ivar user_storage_key: The access key of the workspace storage. - :vartype user_storage_key: str - """ - - _validation = { - 'app_insights_instrumentation_key': {'readonly': True}, - 'user_storage_arm_id': {'readonly': True}, - 'user_storage_key': {'readonly': True}, - } - - _attribute_map = { - 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'}, - 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'}, - 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'ListNotebookKeysResult'}, - 'user_storage_arm_id': {'key': 'userStorageArmId', 'type': 'str'}, - 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword container_registry_credentials: - :paramtype container_registry_credentials: - ~azure.mgmt.machinelearningservices.models.RegistryListCredentialsResult - :keyword notebook_access_keys: - :paramtype notebook_access_keys: - ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult - """ - super(ListWorkspaceKeysResult, self).__init__(**kwargs) - self.app_insights_instrumentation_key = None - self.container_registry_credentials = kwargs.get('container_registry_credentials', None) - self.notebook_access_keys = kwargs.get('notebook_access_keys', None) - self.user_storage_arm_id = None - self.user_storage_key = None - - -class ListWorkspaceQuotas(msrest.serialization.Model): - """The List WorkspaceQuotasByVMFamily operation response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of Workspace Quotas by VM Family. - :vartype value: list[~azure.mgmt.machinelearningservices.models.ResourceQuota] - :ivar next_link: The URI to fetch the next page of workspace quota information by VM Family. - Call ListNext() with this to fetch the next page of Workspace Quota information. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ResourceQuota]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ListWorkspaceQuotas, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class LiteralJobInput(JobInput): - """Literal input type. 
- - All required parameters must be populated in order to send to Azure. - - :ivar description: Description for the input. - :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :ivar value: Required. [Required] Literal value for the input. - :vartype value: str - """ - - _validation = { - 'job_input_type': {'required': True}, - 'value': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: Description for the input. - :paramtype description: str - :keyword value: Required. [Required] Literal value for the input. - :paramtype value: str - """ - super(LiteralJobInput, self).__init__(**kwargs) - self.job_input_type = 'literal' # type: str - self.value = kwargs['value'] - - -class ManagedComputeIdentity(MonitorComputeIdentityBase): - """Managed compute identity definition. - - All required parameters must be populated in order to send to Azure. - - :ivar compute_identity_type: Required. [Required] Monitor compute identity type enum.Constant - filled by server. Possible values include: "AmlToken", "ManagedIdentity". - :vartype compute_identity_type: str or - ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityType - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - """ - - _validation = { - 'compute_identity_type': {'required': True}, - } - - _attribute_map = { - 'compute_identity_type': {'key': 'computeIdentityType', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - """ - super(ManagedComputeIdentity, self).__init__(**kwargs) - self.compute_identity_type = 'ManagedIdentity' # type: str - self.identity = kwargs.get('identity', None) - - -class ManagedIdentity(IdentityConfiguration): - """Managed identity configuration. - - All required parameters must be populated in order to send to Azure. - - :ivar identity_type: Required. [Required] Specifies the type of identity framework.Constant - filled by server. Possible values include: "Managed", "AMLToken", "UserIdentity". - :vartype identity_type: str or - ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType - :ivar client_id: Specifies a user-assigned identity by client ID. For system-assigned, do not - set this field. - :vartype client_id: str - :ivar object_id: Specifies a user-assigned identity by object ID. For system-assigned, do not - set this field. - :vartype object_id: str - :ivar resource_id: Specifies a user-assigned identity by ARM resource ID. For system-assigned, - do not set this field. 
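# --- Editor's illustrative sketch (not part of the patch): in these removed kwargs-based
# models, "[Required]" fields are read with kwargs['...'], so omitting one fails immediately
# with a KeyError at construction time. Import path assumed from the diffstat; the literal
# value is a placeholder.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models  # assumed path

lr = _models.LiteralJobInput(value="0.01", description="learning rate")
assert lr.job_input_type == "literal"      # constant filled by the constructor

try:
    _models.LiteralJobInput(description="missing the required value")
except KeyError:
    pass  # 'value' is mandatory (see the _validation map above)
# --- end sketch ---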
- :vartype resource_id: str - """ - - _validation = { - 'identity_type': {'required': True}, - } - - _attribute_map = { - 'identity_type': {'key': 'identityType', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'object_id': {'key': 'objectId', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword client_id: Specifies a user-assigned identity by client ID. For system-assigned, do - not set this field. - :paramtype client_id: str - :keyword object_id: Specifies a user-assigned identity by object ID. For system-assigned, do - not set this field. - :paramtype object_id: str - :keyword resource_id: Specifies a user-assigned identity by ARM resource ID. For - system-assigned, do not set this field. - :paramtype resource_id: str - """ - super(ManagedIdentity, self).__init__(**kwargs) - self.identity_type = 'Managed' # type: str - self.client_id = kwargs.get('client_id', None) - self.object_id = kwargs.get('object_id', None) - self.resource_id = kwargs.get('resource_id', None) - - -class ManagedIdentityAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): - """ManagedIdentityAuthTypeWorkspaceConnectionProperties. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar expiry_time: - :vartype expiry_time: ~datetime.datetime - :ivar metadata: Any object. - :vartype metadata: any - :ivar target: - :vartype target: str - :ivar credentials: - :vartype credentials: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionManagedIdentity - """ - - _validation = { - 'auth_type': {'required': True}, - } - - _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionManagedIdentity'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :keyword expiry_time: - :paramtype expiry_time: ~datetime.datetime - :keyword metadata: Any object. 
- :paramtype metadata: any - :keyword target: - :paramtype target: str - :keyword credentials: - :paramtype credentials: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionManagedIdentity - """ - super(ManagedIdentityAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'ManagedIdentity' # type: str - self.credentials = kwargs.get('credentials', None) - - -class ManagedNetworkProvisionOptions(msrest.serialization.Model): - """Managed Network Provisioning options for managed network of a machine learning workspace. - - :ivar include_spark: - :vartype include_spark: bool - """ - - _attribute_map = { - 'include_spark': {'key': 'includeSpark', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword include_spark: - :paramtype include_spark: bool - """ - super(ManagedNetworkProvisionOptions, self).__init__(**kwargs) - self.include_spark = kwargs.get('include_spark', None) - - -class ManagedNetworkProvisionStatus(msrest.serialization.Model): - """Status of the Provisioning for the managed network of a machine learning workspace. - - :ivar spark_ready: - :vartype spark_ready: bool - :ivar status: Status for the managed network of a machine learning workspace. Possible values - include: "Inactive", "Active". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.ManagedNetworkStatus - """ - - _attribute_map = { - 'spark_ready': {'key': 'sparkReady', 'type': 'bool'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword spark_ready: - :paramtype spark_ready: bool - :keyword status: Status for the managed network of a machine learning workspace. Possible - values include: "Inactive", "Active". - :paramtype status: str or ~azure.mgmt.machinelearningservices.models.ManagedNetworkStatus - """ - super(ManagedNetworkProvisionStatus, self).__init__(**kwargs) - self.spark_ready = kwargs.get('spark_ready', None) - self.status = kwargs.get('status', None) - - -class ManagedNetworkSettings(msrest.serialization.Model): - """Managed Network settings for a machine learning workspace. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar isolation_mode: Isolation mode for the managed network of a machine learning workspace. - Possible values include: "Disabled", "AllowInternetOutbound", "AllowOnlyApprovedOutbound". - :vartype isolation_mode: str or ~azure.mgmt.machinelearningservices.models.IsolationMode - :ivar network_id: - :vartype network_id: str - :ivar outbound_rules: Dictionary of :code:``. - :vartype outbound_rules: dict[str, ~azure.mgmt.machinelearningservices.models.OutboundRule] - :ivar status: Status of the Provisioning for the managed network of a machine learning - workspace. - :vartype status: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus - """ - - _validation = { - 'network_id': {'readonly': True}, - } - - _attribute_map = { - 'isolation_mode': {'key': 'isolationMode', 'type': 'str'}, - 'network_id': {'key': 'networkId', 'type': 'str'}, - 'outbound_rules': {'key': 'outboundRules', 'type': '{OutboundRule}'}, - 'status': {'key': 'status', 'type': 'ManagedNetworkProvisionStatus'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword isolation_mode: Isolation mode for the managed network of a machine learning - workspace. Possible values include: "Disabled", "AllowInternetOutbound", - "AllowOnlyApprovedOutbound". 
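# --- Editor's illustrative sketch (not part of the patch): the two small managed-network
# models above map straight onto the wire shape through their _attribute_map entries.
# Import path assumed from the diffstat.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models  # assumed path

options = _models.ManagedNetworkProvisionOptions(include_spark=True)
status = _models.ManagedNetworkProvisionStatus(spark_ready=True, status="Active")
print(options.serialize())   # expected: {'includeSpark': True}
print(status.serialize())    # expected: {'sparkReady': True, 'status': 'Active'}
# --- end sketch ---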
- :paramtype isolation_mode: str or ~azure.mgmt.machinelearningservices.models.IsolationMode - :keyword outbound_rules: Dictionary of :code:``. - :paramtype outbound_rules: dict[str, ~azure.mgmt.machinelearningservices.models.OutboundRule] - :keyword status: Status of the Provisioning for the managed network of a machine learning - workspace. - :paramtype status: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus - """ - super(ManagedNetworkSettings, self).__init__(**kwargs) - self.isolation_mode = kwargs.get('isolation_mode', None) - self.network_id = None - self.outbound_rules = kwargs.get('outbound_rules', None) - self.status = kwargs.get('status', None) - - -class ManagedOnlineDeployment(OnlineDeploymentProperties): - """Properties specific to a ManagedOnlineDeployment. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar code_configuration: Code configuration for the endpoint deployment. - :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :ivar description: Description of the endpoint deployment. - :vartype description: str - :ivar environment_id: ARM resource ID of the environment specification for the endpoint - deployment. - :vartype environment_id: str - :ivar environment_variables: Environment variables configuration for the deployment. - :vartype environment_variables: dict[str, str] - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar app_insights_enabled: If true, enables Application Insights logging. - :vartype app_insights_enabled: bool - :ivar data_collector: The mdc configuration, we disable mdc when it's null. - :vartype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector - :ivar egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Possible values include: "Enabled", - "Disabled". - :vartype egress_public_network_access: str or - ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :ivar endpoint_compute_type: Required. [Required] The compute type of the endpoint.Constant - filled by server. Possible values include: "Managed", "Kubernetes", "AzureMLCompute". - :vartype endpoint_compute_type: str or - ~azure.mgmt.machinelearningservices.models.EndpointComputeType - :ivar instance_type: Compute instance type. - :vartype instance_type: str - :ivar liveness_probe: Liveness probe monitors the health of the container regularly. - :vartype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :ivar model: The URI path to the model. - :vartype model: str - :ivar model_mount_path: The path to mount the model in custom container. - :vartype model_mount_path: str - :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values - include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState - :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. - The properties and defaults are the same as liveness probe. - :vartype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :ivar request_settings: Request settings for the deployment. 
- :vartype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings - :ivar scale_settings: Scale settings for the deployment. - If it is null or not provided, - it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment - and to DefaultScaleSettings for ManagedOnlineDeployment. - :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings - """ - - _validation = { - 'endpoint_compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, - 'data_collector': {'key': 'dataCollector', 'type': 'DataCollector'}, - 'egress_public_network_access': {'key': 'egressPublicNetworkAccess', 'type': 'str'}, - 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, - 'model': {'key': 'model', 'type': 'str'}, - 'model_mount_path': {'key': 'modelMountPath', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, - 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword code_configuration: Code configuration for the endpoint deployment. - :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration - :keyword description: Description of the endpoint deployment. - :paramtype description: str - :keyword environment_id: ARM resource ID of the environment specification for the endpoint - deployment. - :paramtype environment_id: str - :keyword environment_variables: Environment variables configuration for the deployment. - :paramtype environment_variables: dict[str, str] - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword app_insights_enabled: If true, enables Application Insights logging. - :paramtype app_insights_enabled: bool - :keyword data_collector: The mdc configuration, we disable mdc when it's null. - :paramtype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector - :keyword egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Possible values include: "Enabled", - "Disabled". - :paramtype egress_public_network_access: str or - ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :keyword instance_type: Compute instance type. - :paramtype instance_type: str - :keyword liveness_probe: Liveness probe monitors the health of the container regularly. - :paramtype liveness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :keyword model: The URI path to the model. - :paramtype model: str - :keyword model_mount_path: The path to mount the model in custom container. 
- :paramtype model_mount_path: str - :keyword readiness_probe: Readiness probe validates if the container is ready to serve traffic. - The properties and defaults are the same as liveness probe. - :paramtype readiness_probe: ~azure.mgmt.machinelearningservices.models.ProbeSettings - :keyword request_settings: Request settings for the deployment. - :paramtype request_settings: ~azure.mgmt.machinelearningservices.models.OnlineRequestSettings - :keyword scale_settings: Scale settings for the deployment. - If it is null or not provided, - it defaults to TargetUtilizationScaleSettings for KubernetesOnlineDeployment - and to DefaultScaleSettings for ManagedOnlineDeployment. - :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings - """ - super(ManagedOnlineDeployment, self).__init__(**kwargs) - self.endpoint_compute_type = 'Managed' # type: str - - -class ManagedServiceIdentity(msrest.serialization.Model): - """Managed service identity (system assigned and/or user assigned identities). - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar principal_id: The service principal ID of the system assigned identity. This property - will only be provided for a system assigned identity. - :vartype principal_id: str - :ivar tenant_id: The tenant ID of the system assigned identity. This property will only be - provided for a system assigned identity. - :vartype tenant_id: str - :ivar type: Required. Type of managed service identity (where both SystemAssigned and - UserAssigned types are allowed). Possible values include: "None", "SystemAssigned", - "UserAssigned", "SystemAssigned,UserAssigned". - :vartype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType - :ivar user_assigned_identities: The set of user assigned identities associated with the - resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. - The dictionary values can be empty objects ({}) in requests. - :vartype user_assigned_identities: dict[str, - ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] - """ - - _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword type: Required. Type of managed service identity (where both SystemAssigned and - UserAssigned types are allowed). Possible values include: "None", "SystemAssigned", - "UserAssigned", "SystemAssigned,UserAssigned". - :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType - :keyword user_assigned_identities: The set of user assigned identities associated with the - resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. - The dictionary values can be empty objects ({}) in requests. 
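# --- Editor's illustrative sketch (not part of the patch): per the ManagedServiceIdentity
# docstring above, a request-side identity can carry empty UserAssignedIdentity objects keyed
# by the identity's ARM resource id. Import path assumed from the diffstat; the ids below are
# placeholders, and UserAssignedIdentity is only referenced (not shown) in this hunk.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models  # assumed path

identity = _models.ManagedServiceIdentity(
    type="SystemAssigned,UserAssigned",
    user_assigned_identities={
        "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/my-rg/providers"
        "/Microsoft.ManagedIdentity/userAssignedIdentities/my-uai": _models.UserAssignedIdentity(),
    },
)
# principal_id and tenant_id are readonly and stay None until the service populates them.
# --- end sketch ---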
- :paramtype user_assigned_identities: dict[str, - ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] - """ - super(ManagedServiceIdentity, self).__init__(**kwargs) - self.principal_id = None - self.tenant_id = None - self.type = kwargs['type'] - self.user_assigned_identities = kwargs.get('user_assigned_identities', None) - - -class MaterializationComputeResource(msrest.serialization.Model): - """Dto object representing compute resource. - - :ivar instance_type: Specifies the instance type. - :vartype instance_type: str - """ - - _attribute_map = { - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword instance_type: Specifies the instance type. - :paramtype instance_type: str - """ - super(MaterializationComputeResource, self).__init__(**kwargs) - self.instance_type = kwargs.get('instance_type', None) - - -class MaterializationSettings(msrest.serialization.Model): - """MaterializationSettings. - - :ivar notification: Specifies the notification details. - :vartype notification: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :ivar resource: Specifies the compute resource settings. - :vartype resource: ~azure.mgmt.machinelearningservices.models.MaterializationComputeResource - :ivar schedule: Specifies the schedule details. - :vartype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceTrigger - :ivar spark_configuration: Specifies the spark compute settings. - :vartype spark_configuration: dict[str, str] - :ivar store_type: Specifies the stores to which materialization should happen. Possible values - include: "None", "Online", "Offline", "OnlineAndOffline". - :vartype store_type: str or ~azure.mgmt.machinelearningservices.models.MaterializationStoreType - """ - - _attribute_map = { - 'notification': {'key': 'notification', 'type': 'NotificationSetting'}, - 'resource': {'key': 'resource', 'type': 'MaterializationComputeResource'}, - 'schedule': {'key': 'schedule', 'type': 'RecurrenceTrigger'}, - 'spark_configuration': {'key': 'sparkConfiguration', 'type': '{str}'}, - 'store_type': {'key': 'storeType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword notification: Specifies the notification details. - :paramtype notification: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :keyword resource: Specifies the compute resource settings. - :paramtype resource: ~azure.mgmt.machinelearningservices.models.MaterializationComputeResource - :keyword schedule: Specifies the schedule details. - :paramtype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceTrigger - :keyword spark_configuration: Specifies the spark compute settings. - :paramtype spark_configuration: dict[str, str] - :keyword store_type: Specifies the stores to which materialization should happen. Possible - values include: "None", "Online", "Offline", "OnlineAndOffline". - :paramtype store_type: str or - ~azure.mgmt.machinelearningservices.models.MaterializationStoreType - """ - super(MaterializationSettings, self).__init__(**kwargs) - self.notification = kwargs.get('notification', None) - self.resource = kwargs.get('resource', None) - self.schedule = kwargs.get('schedule', None) - self.spark_configuration = kwargs.get('spark_configuration', None) - self.store_type = kwargs.get('store_type', None) - - -class MedianStoppingPolicy(EarlyTerminationPolicy): - """Defines an early termination policy based on running averages of the primary metric of all runs. 
- - All required parameters must be populated in order to send to Azure. - - :ivar delay_evaluation: Number of intervals by which to delay the first evaluation. - :vartype delay_evaluation: int - :ivar evaluation_interval: Interval (number of runs) between policy evaluations. - :vartype evaluation_interval: int - :ivar policy_type: Required. [Required] Name of policy configuration.Constant filled by server. - Possible values include: "Bandit", "MedianStopping", "TruncationSelection". - :vartype policy_type: str or - ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType - """ - - _validation = { - 'policy_type': {'required': True}, - } - - _attribute_map = { - 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, - 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, - 'policy_type': {'key': 'policyType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. - :paramtype delay_evaluation: int - :keyword evaluation_interval: Interval (number of runs) between policy evaluations. - :paramtype evaluation_interval: int - """ - super(MedianStoppingPolicy, self).__init__(**kwargs) - self.policy_type = 'MedianStopping' # type: str - - -class MLAssistConfiguration(msrest.serialization.Model): - """Labeling MLAssist configuration definition. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MLAssistConfigurationDisabled, MLAssistConfigurationEnabled. - - All required parameters must be populated in order to send to Azure. - - :ivar ml_assist: Required. [Required] Indicates whether MLAssist feature is enabled.Constant - filled by server. Possible values include: "Enabled", "Disabled". - :vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType - """ - - _validation = { - 'ml_assist': {'required': True}, - } - - _attribute_map = { - 'ml_assist': {'key': 'mlAssist', 'type': 'str'}, - } - - _subtype_map = { - 'ml_assist': {'Disabled': 'MLAssistConfigurationDisabled', 'Enabled': 'MLAssistConfigurationEnabled'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(MLAssistConfiguration, self).__init__(**kwargs) - self.ml_assist = None # type: Optional[str] - - -class MLAssistConfigurationDisabled(MLAssistConfiguration): - """Labeling MLAssist configuration definition when MLAssist is disabled. - - All required parameters must be populated in order to send to Azure. - - :ivar ml_assist: Required. [Required] Indicates whether MLAssist feature is enabled.Constant - filled by server. Possible values include: "Enabled", "Disabled". - :vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType - """ - - _validation = { - 'ml_assist': {'required': True}, - } - - _attribute_map = { - 'ml_assist': {'key': 'mlAssist', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(MLAssistConfigurationDisabled, self).__init__(**kwargs) - self.ml_assist = 'Disabled' # type: str - - -class MLAssistConfigurationEnabled(MLAssistConfiguration): - """Labeling MLAssist configuration definition when MLAssist is enabled. - - All required parameters must be populated in order to send to Azure. - - :ivar ml_assist: Required. [Required] Indicates whether MLAssist feature is enabled.Constant - filled by server. Possible values include: "Enabled", "Disabled". 
- :vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType - :ivar inferencing_compute_binding: Required. [Required] AML compute binding used in - inferencing. - :vartype inferencing_compute_binding: str - :ivar training_compute_binding: Required. [Required] AML compute binding used in training. - :vartype training_compute_binding: str - """ - - _validation = { - 'ml_assist': {'required': True}, - 'inferencing_compute_binding': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'training_compute_binding': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'ml_assist': {'key': 'mlAssist', 'type': 'str'}, - 'inferencing_compute_binding': {'key': 'inferencingComputeBinding', 'type': 'str'}, - 'training_compute_binding': {'key': 'trainingComputeBinding', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword inferencing_compute_binding: Required. [Required] AML compute binding used in - inferencing. - :paramtype inferencing_compute_binding: str - :keyword training_compute_binding: Required. [Required] AML compute binding used in training. - :paramtype training_compute_binding: str - """ - super(MLAssistConfigurationEnabled, self).__init__(**kwargs) - self.ml_assist = 'Enabled' # type: str - self.inferencing_compute_binding = kwargs['inferencing_compute_binding'] - self.training_compute_binding = kwargs['training_compute_binding'] - - -class MLFlowModelJobInput(JobInput, AssetJobInput): - """MLFlowModelJobInput. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str - :ivar description: Description for the input. - :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - """ - - _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str - :keyword description: Description for the input. - :paramtype description: str - """ - super(MLFlowModelJobInput, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.uri = kwargs['uri'] - self.job_input_type = 'mlflow_model' # type: str - self.description = kwargs.get('description', None) - - -class MLFlowModelJobOutput(JobOutput, AssetJobOutput): - """MLFlowModelJobOutput. - - All required parameters must be populated in order to send to Azure. 
- - :ivar asset_name: Output Asset Name. - :vartype asset_name: str - :ivar asset_version: Output Asset Version. - :vartype asset_version: str - :ivar auto_delete_setting: Auto delete setting of output data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", - "Direct". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :ivar uri: Output Asset URI. - :vartype uri: str - :ivar description: Description for the output. - :vartype description: str - :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by - server. Possible values include: "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType - """ - - _validation = { - 'job_output_type': {'required': True}, - } - - _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword asset_name: Output Asset Name. - :paramtype asset_name: str - :keyword asset_version: Output Asset Version. - :paramtype asset_version: str - :keyword auto_delete_setting: Auto delete setting of output data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", - "Direct". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :keyword uri: Output Asset URI. - :paramtype uri: str - :keyword description: Description for the output. - :paramtype description: str - """ - super(MLFlowModelJobOutput, self).__init__(**kwargs) - self.asset_name = kwargs.get('asset_name', None) - self.asset_version = kwargs.get('asset_version', None) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.mode = kwargs.get('mode', None) - self.uri = kwargs.get('uri', None) - self.job_output_type = 'mlflow_model' # type: str - self.description = kwargs.get('description', None) - - -class MLTableData(DataVersionBaseProperties): - """MLTable data definition. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. 
- :vartype is_archived: bool - :ivar data_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "uri_file", "uri_folder", "mltable". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType - :ivar data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. - :vartype data_uri: str - :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual - Property. - :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar stage: Stage in the data lifecycle assigned to this data asset. - :vartype stage: str - :ivar referenced_uris: Uris referenced in the MLTable definition (required for lineage). - :vartype referenced_uris: list[str] - """ - - _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 'stage', 'type': 'str'}, - 'referenced_uris': {'key': 'referencedUris', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :paramtype is_archived: bool - :keyword data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. - :paramtype data_uri: str - :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual - Property. - :paramtype intellectual_property: - ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword stage: Stage in the data lifecycle assigned to this data asset. - :paramtype stage: str - :keyword referenced_uris: Uris referenced in the MLTable definition (required for lineage). - :paramtype referenced_uris: list[str] - """ - super(MLTableData, self).__init__(**kwargs) - self.data_type = 'mltable' # type: str - self.referenced_uris = kwargs.get('referenced_uris', None) - - -class MLTableJobInput(JobInput, AssetJobInput): - """MLTableJobInput. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Input Asset Delivery Mode. 
Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str - :ivar description: Description for the input. - :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - """ - - _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str - :keyword description: Description for the input. - :paramtype description: str - """ - super(MLTableJobInput, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.uri = kwargs['uri'] - self.job_input_type = 'mltable' # type: str - self.description = kwargs.get('description', None) - - -class MLTableJobOutput(JobOutput, AssetJobOutput): - """MLTableJobOutput. - - All required parameters must be populated in order to send to Azure. - - :ivar asset_name: Output Asset Name. - :vartype asset_name: str - :ivar asset_version: Output Asset Version. - :vartype asset_version: str - :ivar auto_delete_setting: Auto delete setting of output data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", - "Direct". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :ivar uri: Output Asset URI. - :vartype uri: str - :ivar description: Description for the output. - :vartype description: str - :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by - server. Possible values include: "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType - """ - - _validation = { - 'job_output_type': {'required': True}, - } - - _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword asset_name: Output Asset Name. - :paramtype asset_name: str - :keyword asset_version: Output Asset Version. - :paramtype asset_version: str - :keyword auto_delete_setting: Auto delete setting of output data asset. 
- :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", - "Direct". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :keyword uri: Output Asset URI. - :paramtype uri: str - :keyword description: Description for the output. - :paramtype description: str - """ - super(MLTableJobOutput, self).__init__(**kwargs) - self.asset_name = kwargs.get('asset_name', None) - self.asset_version = kwargs.get('asset_version', None) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.mode = kwargs.get('mode', None) - self.uri = kwargs.get('uri', None) - self.job_output_type = 'mltable' # type: str - self.description = kwargs.get('description', None) - - -class ModelConfiguration(msrest.serialization.Model): - """Model configuration options. - - :ivar mode: Input delivery mode for the model. Possible values include: "Copy", "Download". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.PackageInputDeliveryMode - :ivar mount_path: Relative mounting path of the model in the target image. - :vartype mount_path: str - """ - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'mount_path': {'key': 'mountPath', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: Input delivery mode for the model. Possible values include: "Copy", "Download". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.PackageInputDeliveryMode - :keyword mount_path: Relative mounting path of the model in the target image. - :paramtype mount_path: str - """ - super(ModelConfiguration, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.mount_path = kwargs.get('mount_path', None) - - -class ModelContainer(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.ModelContainerProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ModelContainerProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. 
- :paramtype properties: ~azure.mgmt.machinelearningservices.models.ModelContainerProperties - """ - super(ModelContainer, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class ModelContainerProperties(AssetContainer): - """ModelContainerProperties. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar latest_version: The latest version inside this container. - :vartype latest_version: str - :ivar next_version: The next auto incremental version. - :vartype next_version: str - :ivar provisioning_state: Provisioning state for the model container. Possible values include: - "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState - """ - - _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - """ - super(ModelContainerProperties, self).__init__(**kwargs) - self.provisioning_state = None - - -class ModelContainerResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of ModelContainer entities. - - :ivar next_link: The link to the next page of ModelContainer objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type ModelContainer. - :vartype value: list[~azure.mgmt.machinelearningservices.models.ModelContainer] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[ModelContainer]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of ModelContainer objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type ModelContainer. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.ModelContainer] - """ - super(ModelContainerResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class ModelPackageInput(msrest.serialization.Model): - """Model package input options. - - All required parameters must be populated in order to send to Azure. - - :ivar input_type: Required. 
[Required] Type of the input included in the target image. Possible - values include: "UriFile", "UriFolder". - :vartype input_type: str or ~azure.mgmt.machinelearningservices.models.PackageInputType - :ivar mode: Input delivery mode of the input. Possible values include: "Copy", "Download". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.PackageInputDeliveryMode - :ivar mount_path: Relative mount path of the input in the target image. - :vartype mount_path: str - :ivar path: Required. [Required] Location of the input. - :vartype path: ~azure.mgmt.machinelearningservices.models.PackageInputPathBase - """ - - _validation = { - 'input_type': {'required': True}, - 'path': {'required': True}, - } - - _attribute_map = { - 'input_type': {'key': 'inputType', 'type': 'str'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'mount_path': {'key': 'mountPath', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'PackageInputPathBase'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword input_type: Required. [Required] Type of the input included in the target image. - Possible values include: "UriFile", "UriFolder". - :paramtype input_type: str or ~azure.mgmt.machinelearningservices.models.PackageInputType - :keyword mode: Input delivery mode of the input. Possible values include: "Copy", "Download". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.PackageInputDeliveryMode - :keyword mount_path: Relative mount path of the input in the target image. - :paramtype mount_path: str - :keyword path: Required. [Required] Location of the input. - :paramtype path: ~azure.mgmt.machinelearningservices.models.PackageInputPathBase - """ - super(ModelPackageInput, self).__init__(**kwargs) - self.input_type = kwargs['input_type'] - self.mode = kwargs.get('mode', None) - self.mount_path = kwargs.get('mount_path', None) - self.path = kwargs['path'] - - -class ModelPerformanceSignal(MonitoringSignalBase): - """Model performance signal definition. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", - "Enabled". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". - :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType - :ivar data_segment: The data segment. - :vartype data_segment: ~azure.mgmt.machinelearningservices.models.MonitoringDataSegment - :ivar metric_threshold: Required. [Required] A list of metrics to calculate and their - associated thresholds. - :vartype metric_threshold: - ~azure.mgmt.machinelearningservices.models.ModelPerformanceMetricThresholdBase - :ivar production_data: Required. [Required] The data produced by the production service which - drift will be calculated for. - :vartype production_data: - list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] - :ivar reference_data: Required. [Required] The data to calculate drift against. 
- :vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - """ - - _validation = { - 'signal_type': {'required': True}, - 'metric_threshold': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'data_segment': {'key': 'dataSegment', 'type': 'MonitoringDataSegment'}, - 'metric_threshold': {'key': 'metricThreshold', 'type': 'ModelPerformanceMetricThresholdBase'}, - 'production_data': {'key': 'productionData', 'type': '[MonitoringInputDataBase]'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword data_segment: The data segment. - :paramtype data_segment: ~azure.mgmt.machinelearningservices.models.MonitoringDataSegment - :keyword metric_threshold: Required. [Required] A list of metrics to calculate and their - associated thresholds. - :paramtype metric_threshold: - ~azure.mgmt.machinelearningservices.models.ModelPerformanceMetricThresholdBase - :keyword production_data: Required. [Required] The data produced by the production service - which drift will be calculated for. - :paramtype production_data: - list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] - :keyword reference_data: Required. [Required] The data to calculate drift against. - :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - """ - super(ModelPerformanceSignal, self).__init__(**kwargs) - self.signal_type = 'ModelPerformance' # type: str - self.data_segment = kwargs.get('data_segment', None) - self.metric_threshold = kwargs['metric_threshold'] - self.production_data = kwargs['production_data'] - self.reference_data = kwargs['reference_data'] - - -class ModelProfile(msrest.serialization.Model): - """ModelProfile. - - All required parameters must be populated in order to send to Azure. - - :ivar model_uri: Required. [Required] The model to create a serverless endpoint of. - :vartype model_uri: str - """ - - _validation = { - 'model_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'model_uri': {'key': 'modelUri', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword model_uri: Required. [Required] The model to create a serverless endpoint of. - :paramtype model_uri: str - """ - super(ModelProfile, self).__init__(**kwargs) - self.model_uri = kwargs['model_uri'] - - -class ModelVersion(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. 
- :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.ModelVersionProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ModelVersionProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.ModelVersionProperties - """ - super(ModelVersion, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class ModelVersionProperties(AssetBase): - """Model asset version details. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :vartype is_archived: bool - :ivar flavors: Mapping of model flavors to their properties. - :vartype flavors: dict[str, ~azure.mgmt.machinelearningservices.models.FlavorData] - :ivar intellectual_property: Intellectual Property details. Used if model is an Intellectual - Property. - :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar job_name: Name of the training job which produced this model. - :vartype job_name: str - :ivar model_type: The storage format for this entity. Used for NCD. - :vartype model_type: str - :ivar model_uri: The URI path to the model contents. - :vartype model_uri: str - :ivar provisioning_state: Provisioning state for the model version. Possible values include: - "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.AssetProvisioningState - :ivar stage: Stage in the model lifecycle assigned to this model. 
- :vartype stage: str - """ - - _validation = { - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'flavors': {'key': 'flavors', 'type': '{FlavorData}'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'job_name': {'key': 'jobName', 'type': 'str'}, - 'model_type': {'key': 'modelType', 'type': 'str'}, - 'model_uri': {'key': 'modelUri', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'stage': {'key': 'stage', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :paramtype is_archived: bool - :keyword flavors: Mapping of model flavors to their properties. - :paramtype flavors: dict[str, ~azure.mgmt.machinelearningservices.models.FlavorData] - :keyword intellectual_property: Intellectual Property details. Used if model is an Intellectual - Property. - :paramtype intellectual_property: - ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword job_name: Name of the training job which produced this model. - :paramtype job_name: str - :keyword model_type: The storage format for this entity. Used for NCD. - :paramtype model_type: str - :keyword model_uri: The URI path to the model contents. - :paramtype model_uri: str - :keyword stage: Stage in the model lifecycle assigned to this model. - :paramtype stage: str - """ - super(ModelVersionProperties, self).__init__(**kwargs) - self.flavors = kwargs.get('flavors', None) - self.intellectual_property = kwargs.get('intellectual_property', None) - self.job_name = kwargs.get('job_name', None) - self.model_type = kwargs.get('model_type', None) - self.model_uri = kwargs.get('model_uri', None) - self.provisioning_state = None - self.stage = kwargs.get('stage', None) - - -class ModelVersionResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of ModelVersion entities. - - :ivar next_link: The link to the next page of ModelVersion objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type ModelVersion. 
- :vartype value: list[~azure.mgmt.machinelearningservices.models.ModelVersion] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[ModelVersion]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of ModelVersion objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type ModelVersion. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.ModelVersion] - """ - super(ModelVersionResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class MonitorComputeConfigurationBase(msrest.serialization.Model): - """Monitor compute configuration base definition. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MonitorServerlessSparkCompute. - - All required parameters must be populated in order to send to Azure. - - :ivar compute_type: Required. [Required] Specifies the type of signal to monitor.Constant - filled by server. Possible values include: "ServerlessSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.MonitorComputeType - """ - - _validation = { - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - } - - _subtype_map = { - 'compute_type': {'ServerlessSpark': 'MonitorServerlessSparkCompute'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(MonitorComputeConfigurationBase, self).__init__(**kwargs) - self.compute_type = None # type: Optional[str] - - -class MonitorDefinition(msrest.serialization.Model): - """MonitorDefinition. - - All required parameters must be populated in order to send to Azure. - - :ivar alert_notification_setting: The monitor's notification settings. - :vartype alert_notification_setting: - ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationSettingsBase - :ivar compute_configuration: Required. [Required] The ARM resource ID of the compute resource - to run the monitoring job on. - :vartype compute_configuration: - ~azure.mgmt.machinelearningservices.models.MonitorComputeConfigurationBase - :ivar monitoring_target: The ARM resource ID of either the model or deployment targeted by this - monitor. - :vartype monitoring_target: ~azure.mgmt.machinelearningservices.models.MonitoringTarget - :ivar signals: Required. [Required] The signals to monitor. - :vartype signals: dict[str, ~azure.mgmt.machinelearningservices.models.MonitoringSignalBase] - """ - - _validation = { - 'compute_configuration': {'required': True}, - 'signals': {'required': True}, - } - - _attribute_map = { - 'alert_notification_setting': {'key': 'alertNotificationSetting', 'type': 'MonitoringAlertNotificationSettingsBase'}, - 'compute_configuration': {'key': 'computeConfiguration', 'type': 'MonitorComputeConfigurationBase'}, - 'monitoring_target': {'key': 'monitoringTarget', 'type': 'MonitoringTarget'}, - 'signals': {'key': 'signals', 'type': '{MonitoringSignalBase}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword alert_notification_setting: The monitor's notification settings. - :paramtype alert_notification_setting: - ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationSettingsBase - :keyword compute_configuration: Required. [Required] The ARM resource ID of the compute - resource to run the monitoring job on. 
-        :paramtype compute_configuration:
-         ~azure.mgmt.machinelearningservices.models.MonitorComputeConfigurationBase
-        :keyword monitoring_target: The ARM resource ID of either the model or deployment targeted by
-         this monitor.
-        :paramtype monitoring_target: ~azure.mgmt.machinelearningservices.models.MonitoringTarget
-        :keyword signals: Required. [Required] The signals to monitor.
-        :paramtype signals: dict[str, ~azure.mgmt.machinelearningservices.models.MonitoringSignalBase]
-        """
-        super(MonitorDefinition, self).__init__(**kwargs)
-        self.alert_notification_setting = kwargs.get('alert_notification_setting', None)
-        self.compute_configuration = kwargs['compute_configuration']
-        self.monitoring_target = kwargs.get('monitoring_target', None)
-        self.signals = kwargs['signals']
-
-
-class MonitoringDataSegment(msrest.serialization.Model):
-    """MonitoringDataSegment.
-
-    :ivar feature: The feature to segment the data on.
-    :vartype feature: str
-    :ivar values: Filters for only the specified values of the given segmented feature.
-    :vartype values: list[str]
-    """
-
-    _attribute_map = {
-        'feature': {'key': 'feature', 'type': 'str'},
-        'values': {'key': 'values', 'type': '[str]'},
-    }
-
-    def __init__(
-        self,
-        **kwargs
-    ):
-        """
-        :keyword feature: The feature to segment the data on.
-        :paramtype feature: str
-        :keyword values: Filters for only the specified values of the given segmented feature.
-        :paramtype values: list[str]
-        """
-        super(MonitoringDataSegment, self).__init__(**kwargs)
-        self.feature = kwargs.get('feature', None)
-        self.values = kwargs.get('values', None)
-
-
-class MonitoringTarget(msrest.serialization.Model):
-    """Monitoring target definition.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :ivar deployment_id: The ARM resource ID of the deployment targeted by this monitor.
-    :vartype deployment_id: str
-    :ivar model_id: The ARM resource ID of the model targeted by this monitor.
-    :vartype model_id: str
-    :ivar task_type: Required. [Required] The machine learning task type of the model. Possible
-     values include: "Classification", "Regression", "QuestionAnswering".
-    :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.ModelTaskType
-    """
-
-    _validation = {
-        'task_type': {'required': True},
-    }
-
-    _attribute_map = {
-        'deployment_id': {'key': 'deploymentId', 'type': 'str'},
-        'model_id': {'key': 'modelId', 'type': 'str'},
-        'task_type': {'key': 'taskType', 'type': 'str'},
-    }
-
-    def __init__(
-        self,
-        **kwargs
-    ):
-        """
-        :keyword deployment_id: The ARM resource ID of the deployment targeted by this monitor.
-        :paramtype deployment_id: str
-        :keyword model_id: The ARM resource ID of the model targeted by this monitor.
-        :paramtype model_id: str
-        :keyword task_type: Required. [Required] The machine learning task type of the model. Possible
-         values include: "Classification", "Regression", "QuestionAnswering".
-        :paramtype task_type: str or ~azure.mgmt.machinelearningservices.models.ModelTaskType
-        """
-        super(MonitoringTarget, self).__init__(**kwargs)
-        self.deployment_id = kwargs.get('deployment_id', None)
-        self.model_id = kwargs.get('model_id', None)
-        self.task_type = kwargs['task_type']
-
-
-class MonitoringThreshold(msrest.serialization.Model):
-    """MonitoringThreshold.
-
-    :ivar value: The threshold value. If null, the set default is dependent on the metric type.
-    :vartype value: float
-    """
-
-    _attribute_map = {
-        'value': {'key': 'value', 'type': 'float'},
-    }
-
-    def __init__(
-        self,
-        **kwargs
-    ):
-        """
-        :keyword value: The threshold value. If null, the set default is dependent on the metric type.
-        :paramtype value: float
-        """
-        super(MonitoringThreshold, self).__init__(**kwargs)
-        self.value = kwargs.get('value', None)
-
-
-class MonitoringWorkspaceConnection(msrest.serialization.Model):
-    """Monitoring workspace connection definition.
-
-    :ivar environment_variables: The properties of a workspace service connection to store as
-     environment variables in the submitted jobs.
-     Key is workspace connection property path, name is environment variable key.
-    :vartype environment_variables: dict[str, str]
-    :ivar secrets: The properties of a workspace service connection to store as secrets in the
-     submitted jobs.
-     Key is workspace connection property path, name is secret key.
-    :vartype secrets: dict[str, str]
-    """
-
-    _attribute_map = {
-        'environment_variables': {'key': 'environmentVariables', 'type': '{str}'},
-        'secrets': {'key': 'secrets', 'type': '{str}'},
-    }
-
-    def __init__(
-        self,
-        **kwargs
-    ):
-        """
-        :keyword environment_variables: The properties of a workspace service connection to store as
-         environment variables in the submitted jobs.
-         Key is workspace connection property path, name is environment variable key.
-        :paramtype environment_variables: dict[str, str]
-        :keyword secrets: The properties of a workspace service connection to store as secrets in the
-         submitted jobs.
-         Key is workspace connection property path, name is secret key.
-        :paramtype secrets: dict[str, str]
-        """
-        super(MonitoringWorkspaceConnection, self).__init__(**kwargs)
-        self.environment_variables = kwargs.get('environment_variables', None)
-        self.secrets = kwargs.get('secrets', None)
-
-
-class MonitorServerlessSparkCompute(MonitorComputeConfigurationBase):
-    """Monitor serverless spark compute definition.
-
-    All required parameters must be populated in order to send to Azure.
-
-    :ivar compute_type: Required. [Required] Specifies the type of signal to monitor.Constant
-     filled by server. Possible values include: "ServerlessSpark".
-    :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.MonitorComputeType
-    :ivar compute_identity: Required. [Required] The identity scheme leveraged by the spark jobs
-     running on serverless Spark.
-    :vartype compute_identity:
-     ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityBase
-    :ivar instance_type: Required. [Required] The instance type running the Spark job.
-    :vartype instance_type: str
-    :ivar runtime_version: Required. [Required] The Spark runtime version.
-    :vartype runtime_version: str
-    """
-
-    _validation = {
-        'compute_type': {'required': True},
-        'compute_identity': {'required': True},
-        'instance_type': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'},
-        'runtime_version': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'},
-    }
-
-    _attribute_map = {
-        'compute_type': {'key': 'computeType', 'type': 'str'},
-        'compute_identity': {'key': 'computeIdentity', 'type': 'MonitorComputeIdentityBase'},
-        'instance_type': {'key': 'instanceType', 'type': 'str'},
-        'runtime_version': {'key': 'runtimeVersion', 'type': 'str'},
-    }
-
-    def __init__(
-        self,
-        **kwargs
-    ):
-        """
-        :keyword compute_identity: Required. [Required] The identity scheme leveraged by the spark
-         jobs running on serverless Spark.
- :paramtype compute_identity: - ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityBase - :keyword instance_type: Required. [Required] The instance type running the Spark job. - :paramtype instance_type: str - :keyword runtime_version: Required. [Required] The Spark runtime version. - :paramtype runtime_version: str - """ - super(MonitorServerlessSparkCompute, self).__init__(**kwargs) - self.compute_type = 'ServerlessSpark' # type: str - self.compute_identity = kwargs['compute_identity'] - self.instance_type = kwargs['instance_type'] - self.runtime_version = kwargs['runtime_version'] - - -class Mpi(DistributionConfiguration): - """MPI distribution configuration. - - All required parameters must be populated in order to send to Azure. - - :ivar distribution_type: Required. [Required] Specifies the type of distribution - framework.Constant filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi", - "Ray". - :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType - :ivar process_count_per_instance: Number of processes per MPI node. - :vartype process_count_per_instance: int - """ - - _validation = { - 'distribution_type': {'required': True}, - } - - _attribute_map = { - 'distribution_type': {'key': 'distributionType', 'type': 'str'}, - 'process_count_per_instance': {'key': 'processCountPerInstance', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword process_count_per_instance: Number of processes per MPI node. - :paramtype process_count_per_instance: int - """ - super(Mpi, self).__init__(**kwargs) - self.distribution_type = 'Mpi' # type: str - self.process_count_per_instance = kwargs.get('process_count_per_instance', None) - - -class NlpFixedParameters(msrest.serialization.Model): - """Fixed training parameters that won't be swept over during AutoML NLP training. - - :ivar gradient_accumulation_steps: Number of steps to accumulate gradients over before running - a backward pass. - :vartype gradient_accumulation_steps: int - :ivar learning_rate: The learning rate for the training procedure. - :vartype learning_rate: float - :ivar learning_rate_scheduler: The type of learning rate schedule to use during the training - procedure. Possible values include: "None", "Linear", "Cosine", "CosineWithRestarts", - "Polynomial", "Constant", "ConstantWithWarmup". - :vartype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.NlpLearningRateScheduler - :ivar model_name: The name of the model to train. - :vartype model_name: str - :ivar number_of_epochs: Number of training epochs. - :vartype number_of_epochs: int - :ivar training_batch_size: The batch size for the training procedure. - :vartype training_batch_size: int - :ivar validation_batch_size: The batch size to be used during evaluation. - :vartype validation_batch_size: int - :ivar warmup_ratio: The warmup ratio, used alongside LrSchedulerType. - :vartype warmup_ratio: float - :ivar weight_decay: The weight decay for the training procedure. 
- :vartype weight_decay: float - """ - - _attribute_map = { - 'gradient_accumulation_steps': {'key': 'gradientAccumulationSteps', 'type': 'int'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'int'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, - 'warmup_ratio': {'key': 'warmupRatio', 'type': 'float'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword gradient_accumulation_steps: Number of steps to accumulate gradients over before - running a backward pass. - :paramtype gradient_accumulation_steps: int - :keyword learning_rate: The learning rate for the training procedure. - :paramtype learning_rate: float - :keyword learning_rate_scheduler: The type of learning rate schedule to use during the training - procedure. Possible values include: "None", "Linear", "Cosine", "CosineWithRestarts", - "Polynomial", "Constant", "ConstantWithWarmup". - :paramtype learning_rate_scheduler: str or - ~azure.mgmt.machinelearningservices.models.NlpLearningRateScheduler - :keyword model_name: The name of the model to train. - :paramtype model_name: str - :keyword number_of_epochs: Number of training epochs. - :paramtype number_of_epochs: int - :keyword training_batch_size: The batch size for the training procedure. - :paramtype training_batch_size: int - :keyword validation_batch_size: The batch size to be used during evaluation. - :paramtype validation_batch_size: int - :keyword warmup_ratio: The warmup ratio, used alongside LrSchedulerType. - :paramtype warmup_ratio: float - :keyword weight_decay: The weight decay for the training procedure. - :paramtype weight_decay: float - """ - super(NlpFixedParameters, self).__init__(**kwargs) - self.gradient_accumulation_steps = kwargs.get('gradient_accumulation_steps', None) - self.learning_rate = kwargs.get('learning_rate', None) - self.learning_rate_scheduler = kwargs.get('learning_rate_scheduler', None) - self.model_name = kwargs.get('model_name', None) - self.number_of_epochs = kwargs.get('number_of_epochs', None) - self.training_batch_size = kwargs.get('training_batch_size', None) - self.validation_batch_size = kwargs.get('validation_batch_size', None) - self.warmup_ratio = kwargs.get('warmup_ratio', None) - self.weight_decay = kwargs.get('weight_decay', None) - - -class NlpParameterSubspace(msrest.serialization.Model): - """Stringified search spaces for each parameter. See below examples. - - :ivar gradient_accumulation_steps: Number of steps to accumulate gradients over before running - a backward pass. - :vartype gradient_accumulation_steps: str - :ivar learning_rate: The learning rate for the training procedure. - :vartype learning_rate: str - :ivar learning_rate_scheduler: The type of learning rate schedule to use during the training - procedure. - :vartype learning_rate_scheduler: str - :ivar model_name: The name of the model to train. - :vartype model_name: str - :ivar number_of_epochs: Number of training epochs. - :vartype number_of_epochs: str - :ivar training_batch_size: The batch size for the training procedure. - :vartype training_batch_size: str - :ivar validation_batch_size: The batch size to be used during evaluation. 
- :vartype validation_batch_size: str - :ivar warmup_ratio: The warmup ratio, used alongside LrSchedulerType. - :vartype warmup_ratio: str - :ivar weight_decay: The weight decay for the training procedure. - :vartype weight_decay: str - """ - - _attribute_map = { - 'gradient_accumulation_steps': {'key': 'gradientAccumulationSteps', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, - 'warmup_ratio': {'key': 'warmupRatio', 'type': 'str'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword gradient_accumulation_steps: Number of steps to accumulate gradients over before - running a backward pass. - :paramtype gradient_accumulation_steps: str - :keyword learning_rate: The learning rate for the training procedure. - :paramtype learning_rate: str - :keyword learning_rate_scheduler: The type of learning rate schedule to use during the training - procedure. - :paramtype learning_rate_scheduler: str - :keyword model_name: The name of the model to train. - :paramtype model_name: str - :keyword number_of_epochs: Number of training epochs. - :paramtype number_of_epochs: str - :keyword training_batch_size: The batch size for the training procedure. - :paramtype training_batch_size: str - :keyword validation_batch_size: The batch size to be used during evaluation. - :paramtype validation_batch_size: str - :keyword warmup_ratio: The warmup ratio, used alongside LrSchedulerType. - :paramtype warmup_ratio: str - :keyword weight_decay: The weight decay for the training procedure. - :paramtype weight_decay: str - """ - super(NlpParameterSubspace, self).__init__(**kwargs) - self.gradient_accumulation_steps = kwargs.get('gradient_accumulation_steps', None) - self.learning_rate = kwargs.get('learning_rate', None) - self.learning_rate_scheduler = kwargs.get('learning_rate_scheduler', None) - self.model_name = kwargs.get('model_name', None) - self.number_of_epochs = kwargs.get('number_of_epochs', None) - self.training_batch_size = kwargs.get('training_batch_size', None) - self.validation_batch_size = kwargs.get('validation_batch_size', None) - self.warmup_ratio = kwargs.get('warmup_ratio', None) - self.weight_decay = kwargs.get('weight_decay', None) - - -class NlpSweepSettings(msrest.serialization.Model): - """Model sweeping and hyperparameter tuning related settings. - - All required parameters must be populated in order to send to Azure. - - :ivar early_termination: Type of early termination policy for the sweeping job. - :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :ivar sampling_algorithm: Required. [Required] Type of sampling algorithm. Possible values - include: "Grid", "Random", "Bayesian". 
- :vartype sampling_algorithm: str or - ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType - """ - - _validation = { - 'sampling_algorithm': {'required': True}, - } - - _attribute_map = { - 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'}, - 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword early_termination: Type of early termination policy for the sweeping job. - :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :keyword sampling_algorithm: Required. [Required] Type of sampling algorithm. Possible values - include: "Grid", "Random", "Bayesian". - :paramtype sampling_algorithm: str or - ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType - """ - super(NlpSweepSettings, self).__init__(**kwargs) - self.early_termination = kwargs.get('early_termination', None) - self.sampling_algorithm = kwargs['sampling_algorithm'] - - -class NlpVertical(msrest.serialization.Model): - """Abstract class for NLP related AutoML tasks. -NLP - Natural Language Processing. - - :ivar featurization_settings: Featurization inputs needed for AutoML job. - :vartype featurization_settings: - ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings - :ivar fixed_parameters: Model/training parameters that will remain constant throughout - training. - :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters - :ivar limit_settings: Execution constraints for AutoMLJob. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] - :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - """ - - _attribute_map = { - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'NlpFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, - 'search_space': {'key': 'searchSpace', 'type': '[NlpParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'NlpSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword featurization_settings: Featurization inputs needed for AutoML job. - :paramtype featurization_settings: - ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings - :keyword fixed_parameters: Model/training parameters that will remain constant throughout - training. - :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters - :keyword limit_settings: Execution constraints for AutoMLJob. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. 
- :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] - :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - """ - super(NlpVertical, self).__init__(**kwargs) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - - -class NlpVerticalFeaturizationSettings(FeaturizationSettings): - """NlpVerticalFeaturizationSettings. - - :ivar dataset_language: Dataset language, useful for the text data. - :vartype dataset_language: str - """ - - _attribute_map = { - 'dataset_language': {'key': 'datasetLanguage', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword dataset_language: Dataset language, useful for the text data. - :paramtype dataset_language: str - """ - super(NlpVerticalFeaturizationSettings, self).__init__(**kwargs) - - -class NlpVerticalLimitSettings(msrest.serialization.Model): - """Job execution constraints. - - :ivar max_concurrent_trials: Maximum Concurrent AutoML iterations. - :vartype max_concurrent_trials: int - :ivar max_nodes: Maximum nodes to use for the experiment. - :vartype max_nodes: int - :ivar max_trials: Number of AutoML iterations. - :vartype max_trials: int - :ivar timeout: AutoML job timeout. - :vartype timeout: ~datetime.timedelta - :ivar trial_timeout: Timeout for individual HD trials. - :vartype trial_timeout: ~datetime.timedelta - """ - - _attribute_map = { - 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, - 'max_nodes': {'key': 'maxNodes', 'type': 'int'}, - 'max_trials': {'key': 'maxTrials', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, - 'trial_timeout': {'key': 'trialTimeout', 'type': 'duration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword max_concurrent_trials: Maximum Concurrent AutoML iterations. - :paramtype max_concurrent_trials: int - :keyword max_nodes: Maximum nodes to use for the experiment. - :paramtype max_nodes: int - :keyword max_trials: Number of AutoML iterations. - :paramtype max_trials: int - :keyword timeout: AutoML job timeout. - :paramtype timeout: ~datetime.timedelta - :keyword trial_timeout: Timeout for individual HD trials. - :paramtype trial_timeout: ~datetime.timedelta - """ - super(NlpVerticalLimitSettings, self).__init__(**kwargs) - self.max_concurrent_trials = kwargs.get('max_concurrent_trials', 1) - self.max_nodes = kwargs.get('max_nodes', 1) - self.max_trials = kwargs.get('max_trials', 1) - self.timeout = kwargs.get('timeout', "P7D") - self.trial_timeout = kwargs.get('trial_timeout', None) - - -class NodeStateCounts(msrest.serialization.Model): - """Counts of various compute node states on the amlCompute. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar idle_node_count: Number of compute nodes in idle state. - :vartype idle_node_count: int - :ivar running_node_count: Number of compute nodes which are running jobs. 
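# --- Editor's note: illustrative sketch, not part of the generated diff. ---
# The NLP AutoML models removed above are constructed from keyword arguments.
# Per their docstrings, NlpSweepSettings requires sampling_algorithm, and
# NlpVerticalLimitSettings defaults max_concurrent_trials / max_nodes /
# max_trials to 1 and timeout to "P7D". The import path below is assumed from
# this package's layout (_restclient/v2023_08_01_preview/models).
from azure.ai.ml._restclient.v2023_08_01_preview import models

nlp_sweep = models.NlpSweepSettings(sampling_algorithm="Grid")
nlp_limits = models.NlpVerticalLimitSettings(max_trials=8, max_concurrent_trials=2)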
- :vartype running_node_count: int - :ivar preparing_node_count: Number of compute nodes which are being prepared. - :vartype preparing_node_count: int - :ivar unusable_node_count: Number of compute nodes which are in unusable state. - :vartype unusable_node_count: int - :ivar leaving_node_count: Number of compute nodes which are leaving the amlCompute. - :vartype leaving_node_count: int - :ivar preempted_node_count: Number of compute nodes which are in preempted state. - :vartype preempted_node_count: int - """ - - _validation = { - 'idle_node_count': {'readonly': True}, - 'running_node_count': {'readonly': True}, - 'preparing_node_count': {'readonly': True}, - 'unusable_node_count': {'readonly': True}, - 'leaving_node_count': {'readonly': True}, - 'preempted_node_count': {'readonly': True}, - } - - _attribute_map = { - 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'}, - 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'}, - 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'}, - 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'}, - 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, - 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(NodeStateCounts, self).__init__(**kwargs) - self.idle_node_count = None - self.running_node_count = None - self.preparing_node_count = None - self.unusable_node_count = None - self.leaving_node_count = None - self.preempted_node_count = None - - -class NoneAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): - """NoneAuthTypeWorkspaceConnectionProperties. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar expiry_time: - :vartype expiry_time: ~datetime.datetime - :ivar metadata: Any object. - :vartype metadata: any - :ivar target: - :vartype target: str - """ - - _validation = { - 'auth_type': {'required': True}, - } - - _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". 
- :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :keyword expiry_time: - :paramtype expiry_time: ~datetime.datetime - :keyword metadata: Any object. - :paramtype metadata: any - :keyword target: - :paramtype target: str - """ - super(NoneAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'None' # type: str - - -class NoneDatastoreCredentials(DatastoreCredentials): - """Empty/none datastore credentials. - - All required parameters must be populated in order to send to Azure. - - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". - :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType - """ - - _validation = { - 'credentials_type': {'required': True}, - } - - _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(NoneDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'None' # type: str - - -class NotebookAccessTokenResult(msrest.serialization.Model): - """NotebookAccessTokenResult. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar access_token: - :vartype access_token: str - :ivar expires_in: - :vartype expires_in: int - :ivar host_name: - :vartype host_name: str - :ivar notebook_resource_id: - :vartype notebook_resource_id: str - :ivar public_dns: - :vartype public_dns: str - :ivar refresh_token: - :vartype refresh_token: str - :ivar scope: - :vartype scope: str - :ivar token_type: - :vartype token_type: str - """ - - _validation = { - 'access_token': {'readonly': True}, - 'expires_in': {'readonly': True}, - 'host_name': {'readonly': True}, - 'notebook_resource_id': {'readonly': True}, - 'public_dns': {'readonly': True}, - 'refresh_token': {'readonly': True}, - 'scope': {'readonly': True}, - 'token_type': {'readonly': True}, - } - - _attribute_map = { - 'access_token': {'key': 'accessToken', 'type': 'str'}, - 'expires_in': {'key': 'expiresIn', 'type': 'int'}, - 'host_name': {'key': 'hostName', 'type': 'str'}, - 'notebook_resource_id': {'key': 'notebookResourceId', 'type': 'str'}, - 'public_dns': {'key': 'publicDns', 'type': 'str'}, - 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, - 'scope': {'key': 'scope', 'type': 'str'}, - 'token_type': {'key': 'tokenType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(NotebookAccessTokenResult, self).__init__(**kwargs) - self.access_token = None - self.expires_in = None - self.host_name = None - self.notebook_resource_id = None - self.public_dns = None - self.refresh_token = None - self.scope = None - self.token_type = None - - -class NotebookPreparationError(msrest.serialization.Model): - """NotebookPreparationError. 
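# --- Editor's note: illustrative sketch, not part of the generated diff. ---
# Discriminated models such as NoneDatastoreCredentials above set their constant
# discriminator (credentials_type) in __init__; callers never pass it. Import
# path assumed from this package's layout.
from azure.ai.ml._restclient.v2023_08_01_preview import models

creds = models.NoneDatastoreCredentials()
assert creds.credentials_type == "None"  # filled by the subclass, not by the caller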
- - :ivar error_message: - :vartype error_message: str - :ivar status_code: - :vartype status_code: int - """ - - _attribute_map = { - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'status_code': {'key': 'statusCode', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword error_message: - :paramtype error_message: str - :keyword status_code: - :paramtype status_code: int - """ - super(NotebookPreparationError, self).__init__(**kwargs) - self.error_message = kwargs.get('error_message', None) - self.status_code = kwargs.get('status_code', None) - - -class NotebookResourceInfo(msrest.serialization.Model): - """NotebookResourceInfo. - - :ivar fqdn: - :vartype fqdn: str - :ivar is_private_link_enabled: - :vartype is_private_link_enabled: bool - :ivar notebook_preparation_error: The error that occurs when preparing notebook. - :vartype notebook_preparation_error: - ~azure.mgmt.machinelearningservices.models.NotebookPreparationError - :ivar resource_id: the data plane resourceId that used to initialize notebook component. - :vartype resource_id: str - """ - - _attribute_map = { - 'fqdn': {'key': 'fqdn', 'type': 'str'}, - 'is_private_link_enabled': {'key': 'isPrivateLinkEnabled', 'type': 'bool'}, - 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword fqdn: - :paramtype fqdn: str - :keyword is_private_link_enabled: - :paramtype is_private_link_enabled: bool - :keyword notebook_preparation_error: The error that occurs when preparing notebook. - :paramtype notebook_preparation_error: - ~azure.mgmt.machinelearningservices.models.NotebookPreparationError - :keyword resource_id: the data plane resourceId that used to initialize notebook component. - :paramtype resource_id: str - """ - super(NotebookResourceInfo, self).__init__(**kwargs) - self.fqdn = kwargs.get('fqdn', None) - self.is_private_link_enabled = kwargs.get('is_private_link_enabled', None) - self.notebook_preparation_error = kwargs.get('notebook_preparation_error', None) - self.resource_id = kwargs.get('resource_id', None) - - -class NotificationSetting(msrest.serialization.Model): - """Configuration for notification. - - :ivar email_on: Send email notification to user on specified notification type. - :vartype email_on: list[str or - ~azure.mgmt.machinelearningservices.models.EmailNotificationEnableType] - :ivar emails: This is the email recipient list which has a limitation of 499 characters in - total concat with comma separator. - :vartype emails: list[str] - :ivar webhooks: Send webhook callback to a service. Key is a user-provided name for the - webhook. - :vartype webhooks: dict[str, ~azure.mgmt.machinelearningservices.models.Webhook] - """ - - _attribute_map = { - 'email_on': {'key': 'emailOn', 'type': '[str]'}, - 'emails': {'key': 'emails', 'type': '[str]'}, - 'webhooks': {'key': 'webhooks', 'type': '{Webhook}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword email_on: Send email notification to user on specified notification type. - :paramtype email_on: list[str or - ~azure.mgmt.machinelearningservices.models.EmailNotificationEnableType] - :keyword emails: This is the email recipient list which has a limitation of 499 characters in - total concat with comma separator. - :paramtype emails: list[str] - :keyword webhooks: Send webhook callback to a service. Key is a user-provided name for the - webhook. 
- :paramtype webhooks: dict[str, ~azure.mgmt.machinelearningservices.models.Webhook] - """ - super(NotificationSetting, self).__init__(**kwargs) - self.email_on = kwargs.get('email_on', None) - self.emails = kwargs.get('emails', None) - self.webhooks = kwargs.get('webhooks', None) - - -class NumericalDataDriftMetricThreshold(DataDriftMetricThresholdBase): - """NumericalDataDriftMetricThreshold. - - All required parameters must be populated in order to send to Azure. - - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType - :ivar threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. [Required] The numerical data drift metric to calculate. Possible - values include: "JensenShannonDistance", "PopulationStabilityIndex", - "NormalizedWassersteinDistance", "TwoSampleKolmogorovSmirnovTest". - :vartype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalDataDriftMetric - """ - - _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, - } - - _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. [Required] The numerical data drift metric to calculate. Possible - values include: "JensenShannonDistance", "PopulationStabilityIndex", - "NormalizedWassersteinDistance", "TwoSampleKolmogorovSmirnovTest". - :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalDataDriftMetric - """ - super(NumericalDataDriftMetricThreshold, self).__init__(**kwargs) - self.data_type = 'Numerical' # type: str - self.metric = kwargs['metric'] - - -class NumericalDataQualityMetricThreshold(DataQualityMetricThresholdBase): - """NumericalDataQualityMetricThreshold. - - All required parameters must be populated in order to send to Azure. - - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType - :ivar threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. [Required] The numerical data quality metric to calculate. Possible - values include: "NullValueRate", "DataTypeErrorRate", "OutOfBoundsRate". 
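# --- Editor's note: illustrative sketch, not part of the generated diff. ---
# NotificationSetting (defined a little further up) takes a list of notification
# types, a recipient list whose comma-joined length is capped at 499 characters,
# and named webhooks. Import path and enum string values are assumptions.
from azure.ai.ml._restclient.v2023_08_01_preview import models

notifications = models.NotificationSetting(
    email_on=["JobFailed", "JobCompleted"],  # EmailNotificationEnableType values (assumed)
    emails=["alice@contoso.com", "bob@contoso.com"],
)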
- :vartype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalDataQualityMetric - """ - - _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, - } - - _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. [Required] The numerical data quality metric to calculate. Possible - values include: "NullValueRate", "DataTypeErrorRate", "OutOfBoundsRate". - :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalDataQualityMetric - """ - super(NumericalDataQualityMetricThreshold, self).__init__(**kwargs) - self.data_type = 'Numerical' # type: str - self.metric = kwargs['metric'] - - -class NumericalPredictionDriftMetricThreshold(PredictionDriftMetricThresholdBase): - """NumericalPredictionDriftMetricThreshold. - - All required parameters must be populated in order to send to Azure. - - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType - :ivar threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. [Required] The numerical prediction drift metric to calculate. Possible - values include: "JensenShannonDistance", "PopulationStabilityIndex", - "NormalizedWassersteinDistance", "TwoSampleKolmogorovSmirnovTest". - :vartype metric: str or - ~azure.mgmt.machinelearningservices.models.NumericalPredictionDriftMetric - """ - - _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, - } - - _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. [Required] The numerical prediction drift metric to calculate. - Possible values include: "JensenShannonDistance", "PopulationStabilityIndex", - "NormalizedWassersteinDistance", "TwoSampleKolmogorovSmirnovTest". - :paramtype metric: str or - ~azure.mgmt.machinelearningservices.models.NumericalPredictionDriftMetric - """ - super(NumericalPredictionDriftMetricThreshold, self).__init__(**kwargs) - self.data_type = 'Numerical' # type: str - self.metric = kwargs['metric'] - - -class Objective(msrest.serialization.Model): - """Optimization objective. - - All required parameters must be populated in order to send to Azure. - - :ivar goal: Required. [Required] Defines supported metric goals for hyperparameter tuning. - Possible values include: "Minimize", "Maximize". - :vartype goal: str or ~azure.mgmt.machinelearningservices.models.Goal - :ivar primary_metric: Required. 
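# --- Editor's note: illustrative sketch, not part of the generated diff. ---
# The Numerical*MetricThreshold models above pin data_type to the constant
# "Numerical" and require only a metric name; threshold is optional, and a
# metric-specific default is applied server-side when it is omitted. Import
# path assumed.
from azure.ai.ml._restclient.v2023_08_01_preview import models

drift_threshold = models.NumericalDataDriftMetricThreshold(metric="JensenShannonDistance")
quality_threshold = models.NumericalDataQualityMetricThreshold(metric="NullValueRate")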
[Required] Name of the metric to optimize. - :vartype primary_metric: str - """ - - _validation = { - 'goal': {'required': True}, - 'primary_metric': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'goal': {'key': 'goal', 'type': 'str'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword goal: Required. [Required] Defines supported metric goals for hyperparameter tuning. - Possible values include: "Minimize", "Maximize". - :paramtype goal: str or ~azure.mgmt.machinelearningservices.models.Goal - :keyword primary_metric: Required. [Required] Name of the metric to optimize. - :paramtype primary_metric: str - """ - super(Objective, self).__init__(**kwargs) - self.goal = kwargs['goal'] - self.primary_metric = kwargs['primary_metric'] - - -class OneLakeDatastore(DatastoreProperties): - """OneLake (Trident) datastore configuration. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar credentials: Required. [Required] Account credentials. - :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled - by server. Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", - "AzureFile", "Hdfs", "OneLake". - :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType - :ivar intellectual_property: Intellectual Property details. - :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar is_default: Readonly property to indicate if datastore is the workspace default - datastore. - :vartype is_default: bool - :ivar artifact: Required. [Required] OneLake artifact backing the datastore. - :vartype artifact: ~azure.mgmt.machinelearningservices.models.OneLakeArtifact - :ivar endpoint: OneLake endpoint to use for the datastore. - :vartype endpoint: str - :ivar one_lake_workspace_name: Required. [Required] OneLake workspace name. - :vartype one_lake_workspace_name: str - :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". 
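# --- Editor's note: illustrative sketch, not part of the generated diff. ---
# Objective above requires both goal and a non-empty primary_metric (validated
# against the pattern [a-zA-Z0-9_]). Import path assumed; the metric name is a
# placeholder string.
from azure.ai.ml._restclient.v2023_08_01_preview import models

objective = models.Objective(goal="Maximize", primary_metric="AUC_weighted")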
- :vartype service_data_access_auth_identity: str or - ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity - """ - - _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'artifact': {'required': True}, - 'one_lake_workspace_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'artifact': {'key': 'artifact', 'type': 'OneLakeArtifact'}, - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'one_lake_workspace_name': {'key': 'oneLakeWorkspaceName', 'type': 'str'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword credentials: Required. [Required] Account credentials. - :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :keyword intellectual_property: Intellectual Property details. - :paramtype intellectual_property: - ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword artifact: Required. [Required] OneLake artifact backing the datastore. - :paramtype artifact: ~azure.mgmt.machinelearningservices.models.OneLakeArtifact - :keyword endpoint: OneLake endpoint to use for the datastore. - :paramtype endpoint: str - :keyword one_lake_workspace_name: Required. [Required] OneLake workspace name. - :paramtype one_lake_workspace_name: str - :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". - :paramtype service_data_access_auth_identity: str or - ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity - """ - super(OneLakeDatastore, self).__init__(**kwargs) - self.datastore_type = 'OneLake' # type: str - self.artifact = kwargs['artifact'] - self.endpoint = kwargs.get('endpoint', None) - self.one_lake_workspace_name = kwargs['one_lake_workspace_name'] - self.service_data_access_auth_identity = kwargs.get('service_data_access_auth_identity', None) - - -class OnlineDeployment(TrackedResource): - """OnlineDeployment. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. 
"Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: A set of tags. Resource tags. - :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. - :vartype location: str - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :vartype kind: str - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.OnlineDeploymentProperties - :ivar sku: Sku details required for ARM contract for Autoscaling. - :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'OnlineDeploymentProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword tags: A set of tags. Resource tags. - :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. - :paramtype location: str - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :paramtype kind: str - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.OnlineDeploymentProperties - :keyword sku: Sku details required for ARM contract for Autoscaling. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - """ - super(OnlineDeployment, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.properties = kwargs['properties'] - self.sku = kwargs.get('sku', None) - - -class OnlineDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of OnlineDeployment entities. - - :ivar next_link: The link to the next page of OnlineDeployment objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type OnlineDeployment. 
- :vartype value: list[~azure.mgmt.machinelearningservices.models.OnlineDeployment] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[OnlineDeployment]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of OnlineDeployment objects. If null, there are - no additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type OnlineDeployment. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.OnlineDeployment] - """ - super(OnlineDeploymentTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class OnlineEndpoint(TrackedResource): - """OnlineEndpoint. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: A set of tags. Resource tags. - :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. - :vartype location: str - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :vartype kind: str - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.OnlineEndpointProperties - :ivar sku: Sku details required for ARM contract for Autoscaling. - :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'OnlineEndpointProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword tags: A set of tags. Resource tags. - :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. - :paramtype location: str - :keyword identity: Managed service identity (system assigned and/or user assigned identities). 
- :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :paramtype kind: str - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.OnlineEndpointProperties - :keyword sku: Sku details required for ARM contract for Autoscaling. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - """ - super(OnlineEndpoint, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.properties = kwargs['properties'] - self.sku = kwargs.get('sku', None) - - -class OnlineEndpointProperties(EndpointPropertiesBase): - """Online endpoint configuration. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' for - Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. - Possible values include: "AMLToken", "Key", "AADToken". - :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode - :ivar description: Description of the inference endpoint. - :vartype description: str - :ivar keys: EndpointAuthKeys to set initially on an Endpoint. - This property will always be returned as null. AuthKey values must be retrieved using the - ListKeys API. - :vartype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar scoring_uri: Endpoint URI. - :vartype scoring_uri: str - :ivar swagger_uri: Endpoint Swagger URI. - :vartype swagger_uri: str - :ivar compute: ARM resource ID of the compute if it exists. - optional. - :vartype compute: str - :ivar mirror_traffic: Percentage of traffic to be mirrored to each deployment without using - returned scoring. Traffic values need to sum to utmost 50. - :vartype mirror_traffic: dict[str, int] - :ivar provisioning_state: Provisioning state for the endpoint. Possible values include: - "Creating", "Deleting", "Succeeded", "Failed", "Updating", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState - :ivar public_network_access: Set to "Enabled" for endpoints that should allow public access - when Private Link is enabled. Possible values include: "Enabled", "Disabled". - :vartype public_network_access: str or - ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType - :ivar traffic: Percentage of traffic from endpoint to divert to each deployment. Traffic values - need to sum to 100. 
- :vartype traffic: dict[str, int] - """ - - _validation = { - 'auth_mode': {'required': True}, - 'scoring_uri': {'readonly': True}, - 'swagger_uri': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'auth_mode': {'key': 'authMode', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, - 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, - 'compute': {'key': 'compute', 'type': 'str'}, - 'mirror_traffic': {'key': 'mirrorTraffic', 'type': '{int}'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'public_network_access': {'key': 'publicNetworkAccess', 'type': 'str'}, - 'traffic': {'key': 'traffic', 'type': '{int}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' - for Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' - does. Possible values include: "AMLToken", "Key", "AADToken". - :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode - :keyword description: Description of the inference endpoint. - :paramtype description: str - :keyword keys: EndpointAuthKeys to set initially on an Endpoint. - This property will always be returned as null. AuthKey values must be retrieved using the - ListKeys API. - :paramtype keys: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword compute: ARM resource ID of the compute if it exists. - optional. - :paramtype compute: str - :keyword mirror_traffic: Percentage of traffic to be mirrored to each deployment without using - returned scoring. Traffic values need to sum to utmost 50. - :paramtype mirror_traffic: dict[str, int] - :keyword public_network_access: Set to "Enabled" for endpoints that should allow public access - when Private Link is enabled. Possible values include: "Enabled", "Disabled". - :paramtype public_network_access: str or - ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType - :keyword traffic: Percentage of traffic from endpoint to divert to each deployment. Traffic - values need to sum to 100. - :paramtype traffic: dict[str, int] - """ - super(OnlineEndpointProperties, self).__init__(**kwargs) - self.compute = kwargs.get('compute', None) - self.mirror_traffic = kwargs.get('mirror_traffic', None) - self.provisioning_state = None - self.public_network_access = kwargs.get('public_network_access', None) - self.traffic = kwargs.get('traffic', None) - - -class OnlineEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of OnlineEndpoint entities. - - :ivar next_link: The link to the next page of OnlineEndpoint objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type OnlineEndpoint. - :vartype value: list[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[OnlineEndpoint]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of OnlineEndpoint objects. If null, there are no - additional pages. 
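# --- Editor's note: illustrative sketch, not part of the generated diff. ---
# Per the docstrings above, OnlineEndpointProperties requires auth_mode, traffic
# percentages must sum to 100, and mirror_traffic may total at most 50. The ARM
# wrapper (OnlineEndpoint) additionally requires location. Import path assumed.
from azure.ai.ml._restclient.v2023_08_01_preview import models

endpoint = models.OnlineEndpoint(
    location="eastus",
    properties=models.OnlineEndpointProperties(
        auth_mode="Key",
        traffic={"blue": 90, "green": 10},  # sums to 100
        mirror_traffic={"shadow": 10},      # at most 50 in total
    ),
)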
- :paramtype next_link: str - :keyword value: An array of objects of type OnlineEndpoint. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] - """ - super(OnlineEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class OnlineInferenceConfiguration(msrest.serialization.Model): - """Online inference configuration options. - - :ivar configurations: Additional configurations. - :vartype configurations: dict[str, str] - :ivar entry_script: Entry script or command to invoke. - :vartype entry_script: str - :ivar liveness_route: The route to check the liveness of the inference server container. - :vartype liveness_route: ~azure.mgmt.machinelearningservices.models.Route - :ivar readiness_route: The route to check the readiness of the inference server container. - :vartype readiness_route: ~azure.mgmt.machinelearningservices.models.Route - :ivar scoring_route: The port to send the scoring requests to, within the inference server - container. - :vartype scoring_route: ~azure.mgmt.machinelearningservices.models.Route - """ - - _attribute_map = { - 'configurations': {'key': 'configurations', 'type': '{str}'}, - 'entry_script': {'key': 'entryScript', 'type': 'str'}, - 'liveness_route': {'key': 'livenessRoute', 'type': 'Route'}, - 'readiness_route': {'key': 'readinessRoute', 'type': 'Route'}, - 'scoring_route': {'key': 'scoringRoute', 'type': 'Route'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword configurations: Additional configurations. - :paramtype configurations: dict[str, str] - :keyword entry_script: Entry script or command to invoke. - :paramtype entry_script: str - :keyword liveness_route: The route to check the liveness of the inference server container. - :paramtype liveness_route: ~azure.mgmt.machinelearningservices.models.Route - :keyword readiness_route: The route to check the readiness of the inference server container. - :paramtype readiness_route: ~azure.mgmt.machinelearningservices.models.Route - :keyword scoring_route: The port to send the scoring requests to, within the inference server - container. - :paramtype scoring_route: ~azure.mgmt.machinelearningservices.models.Route - """ - super(OnlineInferenceConfiguration, self).__init__(**kwargs) - self.configurations = kwargs.get('configurations', None) - self.entry_script = kwargs.get('entry_script', None) - self.liveness_route = kwargs.get('liveness_route', None) - self.readiness_route = kwargs.get('readiness_route', None) - self.scoring_route = kwargs.get('scoring_route', None) - - -class OnlineRequestSettings(msrest.serialization.Model): - """Online deployment scoring requests configuration. - - :ivar max_concurrent_requests_per_instance: The number of maximum concurrent requests per node - allowed per deployment. Defaults to 1. - :vartype max_concurrent_requests_per_instance: int - :ivar max_queue_wait: The maximum amount of time a request will stay in the queue in ISO 8601 - format. - Defaults to 500ms. - :vartype max_queue_wait: ~datetime.timedelta - :ivar request_timeout: The scoring timeout in ISO 8601 format. - Defaults to 5000ms. 
- :vartype request_timeout: ~datetime.timedelta - """ - - _attribute_map = { - 'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'}, - 'max_queue_wait': {'key': 'maxQueueWait', 'type': 'duration'}, - 'request_timeout': {'key': 'requestTimeout', 'type': 'duration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword max_concurrent_requests_per_instance: The number of maximum concurrent requests per - node allowed per deployment. Defaults to 1. - :paramtype max_concurrent_requests_per_instance: int - :keyword max_queue_wait: The maximum amount of time a request will stay in the queue in ISO - 8601 format. - Defaults to 500ms. - :paramtype max_queue_wait: ~datetime.timedelta - :keyword request_timeout: The scoring timeout in ISO 8601 format. - Defaults to 5000ms. - :paramtype request_timeout: ~datetime.timedelta - """ - super(OnlineRequestSettings, self).__init__(**kwargs) - self.max_concurrent_requests_per_instance = kwargs.get('max_concurrent_requests_per_instance', 1) - self.max_queue_wait = kwargs.get('max_queue_wait', "PT0.5S") - self.request_timeout = kwargs.get('request_timeout', "PT5S") - - -class OperationDisplay(msrest.serialization.Model): - """Display name of operation. - - :ivar description: Gets or sets the description for the operation. - :vartype description: str - :ivar operation: Gets or sets the operation that users can perform. - :vartype operation: str - :ivar provider: Gets or sets the resource provider name: - Microsoft.MachineLearningExperimentation. - :vartype provider: str - :ivar resource: Gets or sets the resource on which the operation is performed. - :vartype resource: str - """ - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: Gets or sets the description for the operation. - :paramtype description: str - :keyword operation: Gets or sets the operation that users can perform. - :paramtype operation: str - :keyword provider: Gets or sets the resource provider name: - Microsoft.MachineLearningExperimentation. - :paramtype provider: str - :keyword resource: Gets or sets the resource on which the operation is performed. - :paramtype resource: str - """ - super(OperationDisplay, self).__init__(**kwargs) - self.description = kwargs.get('description', None) - self.operation = kwargs.get('operation', None) - self.provider = kwargs.get('provider', None) - self.resource = kwargs.get('resource', None) - - -class OsPatchingStatus(msrest.serialization.Model): - """Returns metadata about the os patching. - - :ivar patch_status: The os patching status. Possible values include: "CompletedWithWarnings", - "Failed", "InProgress", "Succeeded", "Unknown". - :vartype patch_status: str or ~azure.mgmt.machinelearningservices.models.PatchStatus - :ivar latest_patch_time: Time of the latest os patching. - :vartype latest_patch_time: str - :ivar reboot_pending: Specifies whether this compute instance is pending for reboot to finish - os patching. - :vartype reboot_pending: bool - :ivar scheduled_reboot_time: Time of scheduled reboot. 
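# --- Editor's note: illustrative sketch, not part of the generated diff. ---
# OnlineRequestSettings above serializes its durations as ISO 8601; the generated
# defaults are max_concurrent_requests_per_instance=1, max_queue_wait="PT0.5S"
# and request_timeout="PT5S". Import path assumed.
import datetime

from azure.ai.ml._restclient.v2023_08_01_preview import models

request_settings = models.OnlineRequestSettings(
    max_concurrent_requests_per_instance=2,
    request_timeout=datetime.timedelta(seconds=10),  # serialized as "PT10S"
)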
- :vartype scheduled_reboot_time: str - """ - - _attribute_map = { - 'patch_status': {'key': 'patchStatus', 'type': 'str'}, - 'latest_patch_time': {'key': 'latestPatchTime', 'type': 'str'}, - 'reboot_pending': {'key': 'rebootPending', 'type': 'bool'}, - 'scheduled_reboot_time': {'key': 'scheduledRebootTime', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword patch_status: The os patching status. Possible values include: - "CompletedWithWarnings", "Failed", "InProgress", "Succeeded", "Unknown". - :paramtype patch_status: str or ~azure.mgmt.machinelearningservices.models.PatchStatus - :keyword latest_patch_time: Time of the latest os patching. - :paramtype latest_patch_time: str - :keyword reboot_pending: Specifies whether this compute instance is pending for reboot to - finish os patching. - :paramtype reboot_pending: bool - :keyword scheduled_reboot_time: Time of scheduled reboot. - :paramtype scheduled_reboot_time: str - """ - super(OsPatchingStatus, self).__init__(**kwargs) - self.patch_status = kwargs.get('patch_status', None) - self.latest_patch_time = kwargs.get('latest_patch_time', None) - self.reboot_pending = kwargs.get('reboot_pending', None) - self.scheduled_reboot_time = kwargs.get('scheduled_reboot_time', None) - - -class OutboundRuleBasicResource(Resource): - """Outbound Rule Basic Resource for the managed network of a machine learning workspace. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. Outbound Rule for the managed network of a machine learning - workspace. - :vartype properties: ~azure.mgmt.machinelearningservices.models.OutboundRule - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'OutboundRule'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. Outbound Rule for the managed network of a machine learning - workspace. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.OutboundRule - """ - super(OutboundRuleBasicResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class OutboundRuleListResult(msrest.serialization.Model): - """List of outbound rules for the managed network of a machine learning workspace. - - :ivar next_link: The link to the next page constructed using the continuationToken. If null, - there are no additional pages. - :vartype next_link: str - :ivar value: The list of machine learning workspaces. 
Since this list may be incomplete, the - nextLink field should be used to request the next list of machine learning workspaces. - :vartype value: list[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[OutboundRuleBasicResource]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page constructed using the continuationToken. If - null, there are no additional pages. - :paramtype next_link: str - :keyword value: The list of machine learning workspaces. Since this list may be incomplete, the - nextLink field should be used to request the next list of machine learning workspaces. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] - """ - super(OutboundRuleListResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class OutputPathAssetReference(AssetReferenceBase): - """Reference to an asset via its path in a job output. - - All required parameters must be populated in order to send to Azure. - - :ivar reference_type: Required. [Required] Specifies the type of asset reference.Constant - filled by server. Possible values include: "Id", "DataPath", "OutputPath". - :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType - :ivar job_id: ARM resource ID of the job. - :vartype job_id: str - :ivar path: The path of the file/directory in the job output. - :vartype path: str - """ - - _validation = { - 'reference_type': {'required': True}, - } - - _attribute_map = { - 'reference_type': {'key': 'referenceType', 'type': 'str'}, - 'job_id': {'key': 'jobId', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword job_id: ARM resource ID of the job. - :paramtype job_id: str - :keyword path: The path of the file/directory in the job output. - :paramtype path: str - """ - super(OutputPathAssetReference, self).__init__(**kwargs) - self.reference_type = 'OutputPath' # type: str - self.job_id = kwargs.get('job_id', None) - self.path = kwargs.get('path', None) - - -class PackageInputPathBase(msrest.serialization.Model): - """PackageInputPathBase. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: PackageInputPathId, PackageInputPathVersion, PackageInputPathUrl. - - All required parameters must be populated in order to send to Azure. - - :ivar input_path_type: Required. [Required] Input path type for package inputs.Constant filled - by server. Possible values include: "Url", "PathId", "PathVersion". - :vartype input_path_type: str or ~azure.mgmt.machinelearningservices.models.InputPathType - """ - - _validation = { - 'input_path_type': {'required': True}, - } - - _attribute_map = { - 'input_path_type': {'key': 'inputPathType', 'type': 'str'}, - } - - _subtype_map = { - 'input_path_type': {'PathId': 'PackageInputPathId', 'PathVersion': 'PackageInputPathVersion', 'Url': 'PackageInputPathUrl'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(PackageInputPathBase, self).__init__(**kwargs) - self.input_path_type = None # type: Optional[str] - - -class PackageInputPathId(PackageInputPathBase): - """Package input path specified with a resource id. - - All required parameters must be populated in order to send to Azure. - - :ivar input_path_type: Required. 
[Required] Input path type for package inputs.Constant filled - by server. Possible values include: "Url", "PathId", "PathVersion". - :vartype input_path_type: str or ~azure.mgmt.machinelearningservices.models.InputPathType - :ivar resource_id: Input resource id. - :vartype resource_id: str - """ - - _validation = { - 'input_path_type': {'required': True}, - } - - _attribute_map = { - 'input_path_type': {'key': 'inputPathType', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword resource_id: Input resource id. - :paramtype resource_id: str - """ - super(PackageInputPathId, self).__init__(**kwargs) - self.input_path_type = 'PathId' # type: str - self.resource_id = kwargs.get('resource_id', None) - - -class PackageInputPathUrl(PackageInputPathBase): - """Package input path specified as an url. - - All required parameters must be populated in order to send to Azure. - - :ivar input_path_type: Required. [Required] Input path type for package inputs.Constant filled - by server. Possible values include: "Url", "PathId", "PathVersion". - :vartype input_path_type: str or ~azure.mgmt.machinelearningservices.models.InputPathType - :ivar url: Input path url. - :vartype url: str - """ - - _validation = { - 'input_path_type': {'required': True}, - } - - _attribute_map = { - 'input_path_type': {'key': 'inputPathType', 'type': 'str'}, - 'url': {'key': 'url', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword url: Input path url. - :paramtype url: str - """ - super(PackageInputPathUrl, self).__init__(**kwargs) - self.input_path_type = 'Url' # type: str - self.url = kwargs.get('url', None) - - -class PackageInputPathVersion(PackageInputPathBase): - """Package input path specified with name and version. - - All required parameters must be populated in order to send to Azure. - - :ivar input_path_type: Required. [Required] Input path type for package inputs.Constant filled - by server. Possible values include: "Url", "PathId", "PathVersion". - :vartype input_path_type: str or ~azure.mgmt.machinelearningservices.models.InputPathType - :ivar resource_name: Input resource name. - :vartype resource_name: str - :ivar resource_version: Input resource version. - :vartype resource_version: str - """ - - _validation = { - 'input_path_type': {'required': True}, - } - - _attribute_map = { - 'input_path_type': {'key': 'inputPathType', 'type': 'str'}, - 'resource_name': {'key': 'resourceName', 'type': 'str'}, - 'resource_version': {'key': 'resourceVersion', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword resource_name: Input resource name. - :paramtype resource_name: str - :keyword resource_version: Input resource version. - :paramtype resource_version: str - """ - super(PackageInputPathVersion, self).__init__(**kwargs) - self.input_path_type = 'PathVersion' # type: str - self.resource_name = kwargs.get('resource_name', None) - self.resource_version = kwargs.get('resource_version', None) - - -class PackageRequest(msrest.serialization.Model): - """Model package operation request properties. - - All required parameters must be populated in order to send to Azure. - - :ivar base_environment_source: Base environment to start with. - :vartype base_environment_source: - ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSource - :ivar environment_variables: Collection of environment variables. - :vartype environment_variables: dict[str, str] - :ivar inferencing_server: Required. 
[Required] Inferencing server configurations. - :vartype inferencing_server: ~azure.mgmt.machinelearningservices.models.InferencingServer - :ivar inputs: Collection of inputs. - :vartype inputs: list[~azure.mgmt.machinelearningservices.models.ModelPackageInput] - :ivar model_configuration: Model configuration including the mount mode. - :vartype model_configuration: ~azure.mgmt.machinelearningservices.models.ModelConfiguration - :ivar properties: Property dictionary. Properties can be added, removed, and updated. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar target_environment_id: Required. [Required] Arm ID of the target environment to be - created by package operation. - :vartype target_environment_id: str - """ - - _validation = { - 'inferencing_server': {'required': True}, - 'target_environment_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'base_environment_source': {'key': 'baseEnvironmentSource', 'type': 'BaseEnvironmentSource'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'inferencing_server': {'key': 'inferencingServer', 'type': 'InferencingServer'}, - 'inputs': {'key': 'inputs', 'type': '[ModelPackageInput]'}, - 'model_configuration': {'key': 'modelConfiguration', 'type': 'ModelConfiguration'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'target_environment_id': {'key': 'targetEnvironmentId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword base_environment_source: Base environment to start with. - :paramtype base_environment_source: - ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSource - :keyword environment_variables: Collection of environment variables. - :paramtype environment_variables: dict[str, str] - :keyword inferencing_server: Required. [Required] Inferencing server configurations. - :paramtype inferencing_server: ~azure.mgmt.machinelearningservices.models.InferencingServer - :keyword inputs: Collection of inputs. - :paramtype inputs: list[~azure.mgmt.machinelearningservices.models.ModelPackageInput] - :keyword model_configuration: Model configuration including the mount mode. - :paramtype model_configuration: ~azure.mgmt.machinelearningservices.models.ModelConfiguration - :keyword properties: Property dictionary. Properties can be added, removed, and updated. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword target_environment_id: Required. [Required] Arm ID of the target environment to be - created by package operation. - :paramtype target_environment_id: str - """ - super(PackageRequest, self).__init__(**kwargs) - self.base_environment_source = kwargs.get('base_environment_source', None) - self.environment_variables = kwargs.get('environment_variables', None) - self.inferencing_server = kwargs['inferencing_server'] - self.inputs = kwargs.get('inputs', None) - self.model_configuration = kwargs.get('model_configuration', None) - self.properties = kwargs.get('properties', None) - self.tags = kwargs.get('tags', None) - self.target_environment_id = kwargs['target_environment_id'] - - -class PackageResponse(msrest.serialization.Model): - """Package response returned after async package operation completes successfully. 
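# --- Editor's note: illustrative sketch, not part of the generated diff. ---
# The PackageInputPath* models above form a discriminated hierarchy: each
# subclass fills input_path_type ("PathId", "PathVersion" or "Url") itself.
# Import path assumed; the resource ID and URL below are placeholders.
from azure.ai.ml._restclient.v2023_08_01_preview import models

by_id = models.PackageInputPathId(resource_id="/subscriptions/<sub>/.../models/my-model/versions/1")
by_version = models.PackageInputPathVersion(resource_name="my-model", resource_version="1")
by_url = models.PackageInputPathUrl(url="https://example.com/model.onnx")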
- - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar base_environment_source: Base environment to start with. - :vartype base_environment_source: - ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSource - :ivar build_id: Build id of the image build operation. - :vartype build_id: str - :ivar build_state: Build state of the image build operation. Possible values include: - "NotStarted", "Running", "Succeeded", "Failed". - :vartype build_state: str or ~azure.mgmt.machinelearningservices.models.PackageBuildState - :ivar environment_variables: Collection of environment variables. - :vartype environment_variables: dict[str, str] - :ivar inferencing_server: Inferencing server configurations. - :vartype inferencing_server: ~azure.mgmt.machinelearningservices.models.InferencingServer - :ivar inputs: Collection of inputs. - :vartype inputs: list[~azure.mgmt.machinelearningservices.models.ModelPackageInput] - :ivar log_url: Log url of the image build operation. - :vartype log_url: str - :ivar model_configuration: Model configuration including the mount mode. - :vartype model_configuration: ~azure.mgmt.machinelearningservices.models.ModelConfiguration - :ivar properties: Property dictionary. Tags can be added, removed, and updated. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar target_environment_id: Asset ID of the target environment created by package operation. - :vartype target_environment_id: str - """ - - _validation = { - 'base_environment_source': {'readonly': True}, - 'build_id': {'readonly': True}, - 'build_state': {'readonly': True}, - 'environment_variables': {'readonly': True}, - 'inferencing_server': {'readonly': True}, - 'inputs': {'readonly': True}, - 'log_url': {'readonly': True}, - 'model_configuration': {'readonly': True}, - 'properties': {'readonly': True}, - 'tags': {'readonly': True}, - 'target_environment_id': {'readonly': True}, - } - - _attribute_map = { - 'base_environment_source': {'key': 'baseEnvironmentSource', 'type': 'BaseEnvironmentSource'}, - 'build_id': {'key': 'buildId', 'type': 'str'}, - 'build_state': {'key': 'buildState', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'inferencing_server': {'key': 'inferencingServer', 'type': 'InferencingServer'}, - 'inputs': {'key': 'inputs', 'type': '[ModelPackageInput]'}, - 'log_url': {'key': 'logUrl', 'type': 'str'}, - 'model_configuration': {'key': 'modelConfiguration', 'type': 'ModelConfiguration'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'target_environment_id': {'key': 'targetEnvironmentId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(PackageResponse, self).__init__(**kwargs) - self.base_environment_source = None - self.build_id = None - self.build_state = None - self.environment_variables = None - self.inferencing_server = None - self.inputs = None - self.log_url = None - self.model_configuration = None - self.properties = None - self.tags = None - self.target_environment_id = None - - -class PaginatedComputeResourcesList(msrest.serialization.Model): - """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope. - - :ivar value: An array of Machine Learning compute objects wrapped in ARM resource envelope. 
- :vartype value: list[~azure.mgmt.machinelearningservices.models.ComputeResource] - :ivar next_link: A continuation link (absolute URI) to the next page of results in the list. - :vartype next_link: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[ComputeResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword value: An array of Machine Learning compute objects wrapped in ARM resource envelope. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.ComputeResource] - :keyword next_link: A continuation link (absolute URI) to the next page of results in the list. - :paramtype next_link: str - """ - super(PaginatedComputeResourcesList, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.next_link = kwargs.get('next_link', None) - - -class PartialBatchDeployment(msrest.serialization.Model): - """Mutable batch inference settings per deployment. - - :ivar description: Description of the endpoint deployment. - :vartype description: str - """ - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: Description of the endpoint deployment. - :paramtype description: str - """ - super(PartialBatchDeployment, self).__init__(**kwargs) - self.description = kwargs.get('description', None) - - -class PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties(msrest.serialization.Model): - """Strictly used in update requests. - - :ivar properties: Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.PartialBatchDeployment - :ivar tags: A set of tags. Resource tags. - :vartype tags: dict[str, str] - """ - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'PartialBatchDeployment'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.PartialBatchDeployment - :keyword tags: A set of tags. Resource tags. - :paramtype tags: dict[str, str] - """ - super(PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.tags = kwargs.get('tags', None) - - -class PartialJobBase(msrest.serialization.Model): - """Mutable base definition for a job. - - :ivar notification_setting: Mutable notification setting for the job. - :vartype notification_setting: - ~azure.mgmt.machinelearningservices.models.PartialNotificationSetting - """ - - _attribute_map = { - 'notification_setting': {'key': 'notificationSetting', 'type': 'PartialNotificationSetting'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword notification_setting: Mutable notification setting for the job. - :paramtype notification_setting: - ~azure.mgmt.machinelearningservices.models.PartialNotificationSetting - """ - super(PartialJobBase, self).__init__(**kwargs) - self.notification_setting = kwargs.get('notification_setting', None) - - -class PartialJobBasePartialResource(msrest.serialization.Model): - """Azure Resource Manager resource envelope strictly used in update requests. - - :ivar properties: Additional attributes of the entity. 
- :vartype properties: ~azure.mgmt.machinelearningservices.models.PartialJobBase - """ - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'PartialJobBase'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.PartialJobBase - """ - super(PartialJobBasePartialResource, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class PartialManagedServiceIdentity(msrest.serialization.Model): - """Managed service identity (system assigned and/or user assigned identities). - - :ivar type: Managed service identity (system assigned and/or user assigned identities). - Possible values include: "None", "SystemAssigned", "UserAssigned", - "SystemAssigned,UserAssigned". - :vartype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType - :ivar user_assigned_identities: The set of user assigned identities associated with the - resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. - The dictionary values can be empty objects ({}) in requests. - :vartype user_assigned_identities: dict[str, any] - """ - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{object}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword type: Managed service identity (system assigned and/or user assigned identities). - Possible values include: "None", "SystemAssigned", "UserAssigned", - "SystemAssigned,UserAssigned". - :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType - :keyword user_assigned_identities: The set of user assigned identities associated with the - resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. - The dictionary values can be empty objects ({}) in requests. - :paramtype user_assigned_identities: dict[str, any] - """ - super(PartialManagedServiceIdentity, self).__init__(**kwargs) - self.type = kwargs.get('type', None) - self.user_assigned_identities = kwargs.get('user_assigned_identities', None) - - -class PartialMinimalTrackedResource(msrest.serialization.Model): - """Strictly used in update requests. - - :ivar tags: A set of tags. Resource tags. - :vartype tags: dict[str, str] - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword tags: A set of tags. Resource tags. - :paramtype tags: dict[str, str] - """ - super(PartialMinimalTrackedResource, self).__init__(**kwargs) - self.tags = kwargs.get('tags', None) - - -class PartialMinimalTrackedResourceWithIdentity(PartialMinimalTrackedResource): - """Strictly used in update requests. - - :ivar tags: A set of tags. Resource tags. - :vartype tags: dict[str, str] - :ivar identity: Managed service identity (system assigned and/or user assigned identities). 
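# Editor's illustrative sketch (not part of the generated patch): a hypothetical
# identity patch built from PartialManagedServiceIdentity, as consumed by the
# partial-update ("...WithIdentity") resources below. Per the docstring above, the
# user_assigned_identities keys are ARM resource IDs and the values may be empty
# objects. Import path and placeholder IDs are assumptions for illustration only.
from azure.ai.ml._restclient.v2023_08_01_preview import models

identity_patch = models.PartialManagedServiceIdentity(
    type="SystemAssigned,UserAssigned",
    user_assigned_identities={
        # Hypothetical user-assigned identity resource ID.
        "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
        "Microsoft.ManagedIdentity/userAssignedIdentities/<identity-name>": {},
    },
)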
- :vartype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'PartialManagedServiceIdentity'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword tags: A set of tags. Resource tags. - :paramtype tags: dict[str, str] - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity - """ - super(PartialMinimalTrackedResourceWithIdentity, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - - -class PartialMinimalTrackedResourceWithSku(PartialMinimalTrackedResource): - """Strictly used in update requests. - - :ivar tags: A set of tags. Resource tags. - :vartype tags: dict[str, str] - :ivar sku: Sku details required for ARM contract for Autoscaling. - :vartype sku: ~azure.mgmt.machinelearningservices.models.PartialSku - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'PartialSku'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword tags: A set of tags. Resource tags. - :paramtype tags: dict[str, str] - :keyword sku: Sku details required for ARM contract for Autoscaling. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.PartialSku - """ - super(PartialMinimalTrackedResourceWithSku, self).__init__(**kwargs) - self.sku = kwargs.get('sku', None) - - -class PartialMinimalTrackedResourceWithSkuAndIdentity(PartialMinimalTrackedResource): - """Strictly used in update requests. - - :ivar tags: A set of tags. Resource tags. - :vartype tags: dict[str, str] - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity - :ivar sku: Sku details required for ARM contract for Autoscaling. - :vartype sku: ~azure.mgmt.machinelearningservices.models.PartialSku - """ - - _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'PartialManagedServiceIdentity'}, - 'sku': {'key': 'sku', 'type': 'PartialSku'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword tags: A set of tags. Resource tags. - :paramtype tags: dict[str, str] - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity - :keyword sku: Sku details required for ARM contract for Autoscaling. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.PartialSku - """ - super(PartialMinimalTrackedResourceWithSkuAndIdentity, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.sku = kwargs.get('sku', None) - - -class PartialNotificationSetting(msrest.serialization.Model): - """Mutable configuration for notification. - - :ivar webhooks: Send webhook callback to a service. Key is a user-provided name for the - webhook. - :vartype webhooks: dict[str, ~azure.mgmt.machinelearningservices.models.Webhook] - """ - - _attribute_map = { - 'webhooks': {'key': 'webhooks', 'type': '{Webhook}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword webhooks: Send webhook callback to a service. Key is a user-provided name for the - webhook. 
- :paramtype webhooks: dict[str, ~azure.mgmt.machinelearningservices.models.Webhook] - """ - super(PartialNotificationSetting, self).__init__(**kwargs) - self.webhooks = kwargs.get('webhooks', None) - - -class PartialRegistryPartialTrackedResource(msrest.serialization.Model): - """Strictly used in update requests. - - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: - ~azure.mgmt.machinelearningservices.models.RegistryPartialManagedServiceIdentity - :ivar sku: Sku details required for ARM contract for Autoscaling. - :vartype sku: ~azure.mgmt.machinelearningservices.models.PartialSku - :ivar tags: A set of tags. Resource tags. - :vartype tags: dict[str, str] - """ - - _attribute_map = { - 'identity': {'key': 'identity', 'type': 'RegistryPartialManagedServiceIdentity'}, - 'sku': {'key': 'sku', 'type': 'PartialSku'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: - ~azure.mgmt.machinelearningservices.models.RegistryPartialManagedServiceIdentity - :keyword sku: Sku details required for ARM contract for Autoscaling. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.PartialSku - :keyword tags: A set of tags. Resource tags. - :paramtype tags: dict[str, str] - """ - super(PartialRegistryPartialTrackedResource, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.sku = kwargs.get('sku', None) - self.tags = kwargs.get('tags', None) - - -class PartialSku(msrest.serialization.Model): - """Common SKU definition. - - :ivar capacity: If the SKU supports scale out/in then the capacity integer should be included. - If scale out/in is not possible for the resource this may be omitted. - :vartype capacity: int - :ivar family: If the service has different generations of hardware, for the same SKU, then that - can be captured here. - :vartype family: str - :ivar name: The name of the SKU. Ex - P3. It is typically a letter+number code. - :vartype name: str - :ivar size: The SKU size. When the name field is the combination of tier and some other value, - this would be the standalone code. - :vartype size: str - :ivar tier: This field is required to be implemented by the Resource Provider if the service - has more than one tier, but is not required on a PUT. Possible values include: "Free", "Basic", - "Standard", "Premium". - :vartype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier - """ - - _attribute_map = { - 'capacity': {'key': 'capacity', 'type': 'int'}, - 'family': {'key': 'family', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword capacity: If the SKU supports scale out/in then the capacity integer should be - included. If scale out/in is not possible for the resource this may be omitted. - :paramtype capacity: int - :keyword family: If the service has different generations of hardware, for the same SKU, then - that can be captured here. - :paramtype family: str - :keyword name: The name of the SKU. Ex - P3. It is typically a letter+number code. - :paramtype name: str - :keyword size: The SKU size. When the name field is the combination of tier and some other - value, this would be the standalone code. 
- :paramtype size: str - :keyword tier: This field is required to be implemented by the Resource Provider if the service - has more than one tier, but is not required on a PUT. Possible values include: "Free", "Basic", - "Standard", "Premium". - :paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier - """ - super(PartialSku, self).__init__(**kwargs) - self.capacity = kwargs.get('capacity', None) - self.family = kwargs.get('family', None) - self.name = kwargs.get('name', None) - self.size = kwargs.get('size', None) - self.tier = kwargs.get('tier', None) - - -class Password(msrest.serialization.Model): - """Password. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: - :vartype name: str - :ivar value: - :vartype value: str - """ - - _validation = { - 'name': {'readonly': True}, - 'value': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(Password, self).__init__(**kwargs) - self.name = None - self.value = None - - -class PATAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): - """PATAuthTypeWorkspaceConnectionProperties. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar expiry_time: - :vartype expiry_time: ~datetime.datetime - :ivar metadata: Any object. - :vartype metadata: any - :ivar target: - :vartype target: str - :ivar credentials: - :vartype credentials: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPersonalAccessToken - """ - - _validation = { - 'auth_type': {'required': True}, - } - - _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionPersonalAccessToken'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :keyword expiry_time: - :paramtype expiry_time: ~datetime.datetime - :keyword metadata: Any object. 
- :paramtype metadata: any - :keyword target: - :paramtype target: str - :keyword credentials: - :paramtype credentials: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPersonalAccessToken - """ - super(PATAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'PAT' # type: str - self.credentials = kwargs.get('credentials', None) - - -class PendingUploadCredentialDto(msrest.serialization.Model): - """PendingUploadCredentialDto. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SASCredentialDto. - - All required parameters must be populated in order to send to Azure. - - :ivar credential_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "SAS". - :vartype credential_type: str or - ~azure.mgmt.machinelearningservices.models.PendingUploadCredentialType - """ - - _validation = { - 'credential_type': {'required': True}, - } - - _attribute_map = { - 'credential_type': {'key': 'credentialType', 'type': 'str'}, - } - - _subtype_map = { - 'credential_type': {'SAS': 'SASCredentialDto'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(PendingUploadCredentialDto, self).__init__(**kwargs) - self.credential_type = None # type: Optional[str] - - -class PendingUploadRequestDto(msrest.serialization.Model): - """PendingUploadRequestDto. - - :ivar pending_upload_id: If PendingUploadId = null then random guid will be used. - :vartype pending_upload_id: str - :ivar pending_upload_type: TemporaryBlobReference is the only supported type. Possible values - include: "None", "TemporaryBlobReference". - :vartype pending_upload_type: str or - ~azure.mgmt.machinelearningservices.models.PendingUploadType - """ - - _attribute_map = { - 'pending_upload_id': {'key': 'pendingUploadId', 'type': 'str'}, - 'pending_upload_type': {'key': 'pendingUploadType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword pending_upload_id: If PendingUploadId = null then random guid will be used. - :paramtype pending_upload_id: str - :keyword pending_upload_type: TemporaryBlobReference is the only supported type. Possible - values include: "None", "TemporaryBlobReference". - :paramtype pending_upload_type: str or - ~azure.mgmt.machinelearningservices.models.PendingUploadType - """ - super(PendingUploadRequestDto, self).__init__(**kwargs) - self.pending_upload_id = kwargs.get('pending_upload_id', None) - self.pending_upload_type = kwargs.get('pending_upload_type', None) - - -class PendingUploadResponseDto(msrest.serialization.Model): - """PendingUploadResponseDto. - - :ivar blob_reference_for_consumption: Container level read, write, list SAS. - :vartype blob_reference_for_consumption: - ~azure.mgmt.machinelearningservices.models.BlobReferenceForConsumptionDto - :ivar pending_upload_id: ID for this upload request. - :vartype pending_upload_id: str - :ivar pending_upload_type: TemporaryBlobReference is the only supported type. Possible values - include: "None", "TemporaryBlobReference". 
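# Editor's illustrative sketch (not part of the generated patch): requesting a
# pending upload, assuming the DTO is exposed through this API version's models
# package. Per the docstrings above, a null pending_upload_id makes the service
# generate a random GUID, and TemporaryBlobReference is the only supported type.
from azure.ai.ml._restclient.v2023_08_01_preview import models

upload_request = models.PendingUploadRequestDto(
    pending_upload_id=None,                        # service will generate a GUID
    pending_upload_type="TemporaryBlobReference",  # only supported upload type
)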
- :vartype pending_upload_type: str or - ~azure.mgmt.machinelearningservices.models.PendingUploadType - """ - - _attribute_map = { - 'blob_reference_for_consumption': {'key': 'blobReferenceForConsumption', 'type': 'BlobReferenceForConsumptionDto'}, - 'pending_upload_id': {'key': 'pendingUploadId', 'type': 'str'}, - 'pending_upload_type': {'key': 'pendingUploadType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword blob_reference_for_consumption: Container level read, write, list SAS. - :paramtype blob_reference_for_consumption: - ~azure.mgmt.machinelearningservices.models.BlobReferenceForConsumptionDto - :keyword pending_upload_id: ID for this upload request. - :paramtype pending_upload_id: str - :keyword pending_upload_type: TemporaryBlobReference is the only supported type. Possible - values include: "None", "TemporaryBlobReference". - :paramtype pending_upload_type: str or - ~azure.mgmt.machinelearningservices.models.PendingUploadType - """ - super(PendingUploadResponseDto, self).__init__(**kwargs) - self.blob_reference_for_consumption = kwargs.get('blob_reference_for_consumption', None) - self.pending_upload_id = kwargs.get('pending_upload_id', None) - self.pending_upload_type = kwargs.get('pending_upload_type', None) - - -class PersonalComputeInstanceSettings(msrest.serialization.Model): - """Settings for a personal compute instance. - - :ivar assigned_user: A user explicitly assigned to a personal compute instance. - :vartype assigned_user: ~azure.mgmt.machinelearningservices.models.AssignedUser - """ - - _attribute_map = { - 'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword assigned_user: A user explicitly assigned to a personal compute instance. - :paramtype assigned_user: ~azure.mgmt.machinelearningservices.models.AssignedUser - """ - super(PersonalComputeInstanceSettings, self).__init__(**kwargs) - self.assigned_user = kwargs.get('assigned_user', None) - - -class PipelineJob(JobBaseProperties): - """Pipeline Job definition: defines generic to MFE attributes. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar component_id: ARM resource ID of the component resource. - :vartype component_id: str - :ivar compute_id: ARM resource ID of the compute resource. - :vartype compute_id: str - :ivar display_name: Display name of job. - :vartype display_name: str - :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. - :vartype experiment_name: str - :ivar identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. - :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". 
- :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType - :ivar notification_setting: Notification setting for the job. - :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :ivar secrets_configuration: Configuration for secrets to be made available during runtime. - :vartype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :ivar services: List of JobEndpoints. - For local jobs, a job endpoint will have an endpoint value of FileStreamObject. - :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus - :ivar inputs: Inputs for the pipeline job. - :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] - :ivar jobs: Jobs construct the Pipeline Job. - :vartype jobs: dict[str, any] - :ivar outputs: Outputs for the pipeline job. - :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] - :ivar settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. - :vartype settings: any - :ivar source_job_id: ARM resource ID of source job. - :vartype source_job_id: str - """ - - _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'jobs': {'key': 'jobs', 'type': '{object}'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'settings': {'key': 'settings', 'type': 'object'}, - 'source_job_id': {'key': 'sourceJobId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword component_id: ARM resource ID of the component resource. - :paramtype component_id: str - :keyword compute_id: ARM resource ID of the compute resource. - :paramtype compute_id: str - :keyword display_name: Display name of job. - :paramtype display_name: str - :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. 
- :paramtype experiment_name: str - :keyword identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. - :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - :keyword notification_setting: Notification setting for the job. - :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :keyword secrets_configuration: Configuration for secrets to be made available during runtime. - :paramtype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :keyword services: List of JobEndpoints. - For local jobs, a job endpoint will have an endpoint value of FileStreamObject. - :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :keyword inputs: Inputs for the pipeline job. - :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] - :keyword jobs: Jobs construct the Pipeline Job. - :paramtype jobs: dict[str, any] - :keyword outputs: Outputs for the pipeline job. - :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] - :keyword settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. - :paramtype settings: any - :keyword source_job_id: ARM resource ID of source job. - :paramtype source_job_id: str - """ - super(PipelineJob, self).__init__(**kwargs) - self.job_type = 'Pipeline' # type: str - self.inputs = kwargs.get('inputs', None) - self.jobs = kwargs.get('jobs', None) - self.outputs = kwargs.get('outputs', None) - self.settings = kwargs.get('settings', None) - self.source_job_id = kwargs.get('source_job_id', None) - - -class PredictionDriftMonitoringSignal(MonitoringSignalBase): - """PredictionDriftMonitoringSignal. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", - "Enabled". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :ivar properties: Property dictionary. Properties can be added, but not removed or altered. - :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". - :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType - :ivar metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. - :vartype metric_thresholds: - list[~azure.mgmt.machinelearningservices.models.PredictionDriftMetricThresholdBase] - :ivar model_type: Required. [Required] The type of the model monitored. Possible values - include: "Classification", "Regression". - :vartype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType - :ivar production_data: Required. [Required] The data which drift will be calculated for. - :vartype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :ivar reference_data: Required. [Required] The data to calculate drift against. 
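# Editor's illustrative sketch (not part of the generated patch): a minimal,
# hypothetical PipelineJob payload. jobs and settings are free-form per the model
# (dict[str, any] / any); the settings key name below is taken from the docstring's
# own example, and job_type is a discriminator constant set to "Pipeline".
from azure.ai.ml._restclient.v2023_08_01_preview import models

pipeline_job = models.PipelineJob(
    display_name="hypothetical-pipeline",
    experiment_name="demo-experiment",              # defaults to "Default" if unset
    settings={"ContinueRunOnStepFailure": True},    # free-form pipeline settings
    jobs={},                                        # component jobs keyed by step name
)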
- :vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - """ - - _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'model_type': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[PredictionDriftMetricThresholdBase]'}, - 'model_type': {'key': 'modelType', 'type': 'str'}, - 'production_data': {'key': 'productionData', 'type': 'MonitoringInputDataBase'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode - :keyword properties: Property dictionary. Properties can be added, but not removed or altered. - :paramtype properties: dict[str, str] - :keyword metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. - :paramtype metric_thresholds: - list[~azure.mgmt.machinelearningservices.models.PredictionDriftMetricThresholdBase] - :keyword model_type: Required. [Required] The type of the model monitored. Possible values - include: "Classification", "Regression". - :paramtype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType - :keyword production_data: Required. [Required] The data which drift will be calculated for. - :paramtype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :keyword reference_data: Required. [Required] The data to calculate drift against. - :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - """ - super(PredictionDriftMonitoringSignal, self).__init__(**kwargs) - self.signal_type = 'PredictionDrift' # type: str - self.metric_thresholds = kwargs['metric_thresholds'] - self.model_type = kwargs['model_type'] - self.production_data = kwargs['production_data'] - self.reference_data = kwargs['reference_data'] - - -class PrivateEndpoint(msrest.serialization.Model): - """The Private Endpoint resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The ARM identifier for Private Endpoint. - :vartype id: str - """ - - _validation = { - 'id': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(PrivateEndpoint, self).__init__(**kwargs) - self.id = None - - -class PrivateEndpointConnection(Resource): - """The Private Endpoint Connection resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". 
- :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar location: Same as workspace location. - :vartype location: str - :ivar sku: Optional. This field is required to be implemented by the RP because AML is - supporting more than one tier. - :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar tags: A set of tags. Dictionary of :code:``. - :vartype tags: dict[str, str] - :ivar private_endpoint: The Private Endpoint resource. - :vartype private_endpoint: - ~azure.mgmt.machinelearningservices.models.WorkspacePrivateEndpointResource - :ivar private_link_service_connection_state: The connection state. - :vartype private_link_service_connection_state: - ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState - :ivar provisioning_state: The current provisioning state. Possible values include: "Succeeded", - "Creating", "Deleting", "Failed". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionProvisioningState - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'WorkspacePrivateEndpointResource'}, - 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword location: Same as workspace location. - :paramtype location: str - :keyword sku: Optional. This field is required to be implemented by the RP because AML is - supporting more than one tier. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - :keyword tags: A set of tags. Dictionary of :code:``. - :paramtype tags: dict[str, str] - :keyword private_endpoint: The Private Endpoint resource. - :paramtype private_endpoint: - ~azure.mgmt.machinelearningservices.models.WorkspacePrivateEndpointResource - :keyword private_link_service_connection_state: The connection state. 
- :paramtype private_link_service_connection_state: - ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState - """ - super(PrivateEndpointConnection, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.location = kwargs.get('location', None) - self.sku = kwargs.get('sku', None) - self.tags = kwargs.get('tags', None) - self.private_endpoint = kwargs.get('private_endpoint', None) - self.private_link_service_connection_state = kwargs.get('private_link_service_connection_state', None) - self.provisioning_state = None - - -class PrivateEndpointConnectionListResult(msrest.serialization.Model): - """List of private endpoint connection associated with the specified workspace. - - :ivar value: Array of private endpoint connections. - :vartype value: list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword value: Array of private endpoint connections. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] - """ - super(PrivateEndpointConnectionListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class PrivateEndpointDestination(msrest.serialization.Model): - """Private Endpoint destination for a Private Endpoint Outbound Rule for the managed network of a machine learning workspace. - - :ivar service_resource_id: - :vartype service_resource_id: str - :ivar spark_enabled: - :vartype spark_enabled: bool - :ivar spark_status: Type of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Inactive", "Active". - :vartype spark_status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus - :ivar subresource_target: - :vartype subresource_target: str - """ - - _attribute_map = { - 'service_resource_id': {'key': 'serviceResourceId', 'type': 'str'}, - 'spark_enabled': {'key': 'sparkEnabled', 'type': 'bool'}, - 'spark_status': {'key': 'sparkStatus', 'type': 'str'}, - 'subresource_target': {'key': 'subresourceTarget', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword service_resource_id: - :paramtype service_resource_id: str - :keyword spark_enabled: - :paramtype spark_enabled: bool - :keyword spark_status: Type of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Inactive", "Active". - :paramtype spark_status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus - :keyword subresource_target: - :paramtype subresource_target: str - """ - super(PrivateEndpointDestination, self).__init__(**kwargs) - self.service_resource_id = kwargs.get('service_resource_id', None) - self.spark_enabled = kwargs.get('spark_enabled', None) - self.spark_status = kwargs.get('spark_status', None) - self.subresource_target = kwargs.get('subresource_target', None) - - -class PrivateEndpointOutboundRule(OutboundRule): - """Private Endpoint Outbound Rule for the managed network of a machine learning workspace. - - All required parameters must be populated in order to send to Azure. - - :ivar category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :ivar status: Type of a managed network Outbound Rule of a machine learning workspace. 
Possible - values include: "Inactive", "Active". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus - :ivar type: Required. Type of a managed network Outbound Rule of a machine learning - workspace.Constant filled by server. Possible values include: "FQDN", "PrivateEndpoint", - "ServiceTag". - :vartype type: str or ~azure.mgmt.machinelearningservices.models.RuleType - :ivar destination: Private Endpoint destination for a Private Endpoint Outbound Rule for the - managed network of a machine learning workspace. - :vartype destination: ~azure.mgmt.machinelearningservices.models.PrivateEndpointDestination - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'destination': {'key': 'destination', 'type': 'PrivateEndpointDestination'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :keyword status: Type of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Inactive", "Active". - :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus - :keyword destination: Private Endpoint destination for a Private Endpoint Outbound Rule for the - managed network of a machine learning workspace. - :paramtype destination: ~azure.mgmt.machinelearningservices.models.PrivateEndpointDestination - """ - super(PrivateEndpointOutboundRule, self).__init__(**kwargs) - self.type = 'PrivateEndpoint' # type: str - self.destination = kwargs.get('destination', None) - - -class PrivateEndpointResource(PrivateEndpoint): - """The PE network resource that is linked to this PE connection. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: The ARM identifier for Private Endpoint. - :vartype id: str - :ivar subnet_arm_id: The subnetId that the private endpoint is connected to. - :vartype subnet_arm_id: str - """ - - _validation = { - 'id': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword subnet_arm_id: The subnetId that the private endpoint is connected to. - :paramtype subnet_arm_id: str - """ - super(PrivateEndpointResource, self).__init__(**kwargs) - self.subnet_arm_id = kwargs.get('subnet_arm_id', None) - - -class PrivateLinkResource(Resource): - """A private link resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. 
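# Editor's illustrative sketch (not part of the generated patch): a hypothetical
# user-defined private endpoint outbound rule for a managed network, built from
# the two models above. The rule's type discriminator is filled by the server
# ("PrivateEndpoint"); the resource ID and sub-resource are placeholders.
from azure.ai.ml._restclient.v2023_08_01_preview import models

pe_rule = models.PrivateEndpointOutboundRule(
    category="UserDefined",
    destination=models.PrivateEndpointDestination(
        # Hypothetical target resource ID and sub-resource.
        service_resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
        "Microsoft.Storage/storageAccounts/<account>",
        subresource_target="blob",
        spark_enabled=False,
    ),
)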
- :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar location: Same as workspace location. - :vartype location: str - :ivar sku: Optional. This field is required to be implemented by the RP because AML is - supporting more than one tier. - :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar tags: A set of tags. Dictionary of :code:``. - :vartype tags: dict[str, str] - :ivar group_id: The private link resource group id. - :vartype group_id: str - :ivar required_members: The private link resource required member names. - :vartype required_members: list[str] - :ivar required_zone_names: The private link resource Private link DNS zone name. - :vartype required_zone_names: list[str] - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, - 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword location: Same as workspace location. - :paramtype location: str - :keyword sku: Optional. This field is required to be implemented by the RP because AML is - supporting more than one tier. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - :keyword tags: A set of tags. Dictionary of :code:``. - :paramtype tags: dict[str, str] - :keyword group_id: The private link resource group id. - :paramtype group_id: str - :keyword required_members: The private link resource required member names. - :paramtype required_members: list[str] - :keyword required_zone_names: The private link resource Private link DNS zone name. - :paramtype required_zone_names: list[str] - """ - super(PrivateLinkResource, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.location = kwargs.get('location', None) - self.sku = kwargs.get('sku', None) - self.tags = kwargs.get('tags', None) - self.group_id = kwargs.get('group_id', None) - self.required_members = kwargs.get('required_members', None) - self.required_zone_names = kwargs.get('required_zone_names', None) - - -class PrivateLinkResourceListResult(msrest.serialization.Model): - """A list of private link resources. 
- - :ivar value: - :vartype value: list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword value: - :paramtype value: list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] - """ - super(PrivateLinkResourceListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class PrivateLinkServiceConnectionState(msrest.serialization.Model): - """A collection of information about the state of the connection between service consumer and provider. - - :ivar actions_required: Some RP chose "None". Other RPs use this for region expansion. - :vartype actions_required: str - :ivar description: User-defined message that, per NRP doc, may be used for approval-related - message. - :vartype description: str - :ivar status: Connection status of the service consumer with the service provider. Possible - values include: "Approved", "Pending", "Rejected", "Disconnected", "Timeout". - :vartype status: str or - ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus - """ - - _attribute_map = { - 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword actions_required: Some RP chose "None". Other RPs use this for region expansion. - :paramtype actions_required: str - :keyword description: User-defined message that, per NRP doc, may be used for approval-related - message. - :paramtype description: str - :keyword status: Connection status of the service consumer with the service provider. Possible - values include: "Approved", "Pending", "Rejected", "Disconnected", "Timeout". - :paramtype status: str or - ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus - """ - super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) - self.actions_required = kwargs.get('actions_required', None) - self.description = kwargs.get('description', None) - self.status = kwargs.get('status', None) - - -class ProbeSettings(msrest.serialization.Model): - """Deployment container liveness/readiness probe configuration. - - :ivar failure_threshold: The number of failures to allow before returning an unhealthy status. - :vartype failure_threshold: int - :ivar initial_delay: The delay before the first probe in ISO 8601 format. - :vartype initial_delay: ~datetime.timedelta - :ivar period: The length of time between probes in ISO 8601 format. - :vartype period: ~datetime.timedelta - :ivar success_threshold: The number of successful probes before returning a healthy status. - :vartype success_threshold: int - :ivar timeout: The probe timeout in ISO 8601 format. - :vartype timeout: ~datetime.timedelta - """ - - _attribute_map = { - 'failure_threshold': {'key': 'failureThreshold', 'type': 'int'}, - 'initial_delay': {'key': 'initialDelay', 'type': 'duration'}, - 'period': {'key': 'period', 'type': 'duration'}, - 'success_threshold': {'key': 'successThreshold', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword failure_threshold: The number of failures to allow before returning an unhealthy - status. - :paramtype failure_threshold: int - :keyword initial_delay: The delay before the first probe in ISO 8601 format. 
- :paramtype initial_delay: ~datetime.timedelta - :keyword period: The length of time between probes in ISO 8601 format. - :paramtype period: ~datetime.timedelta - :keyword success_threshold: The number of successful probes before returning a healthy status. - :paramtype success_threshold: int - :keyword timeout: The probe timeout in ISO 8601 format. - :paramtype timeout: ~datetime.timedelta - """ - super(ProbeSettings, self).__init__(**kwargs) - self.failure_threshold = kwargs.get('failure_threshold', 30) - self.initial_delay = kwargs.get('initial_delay', None) - self.period = kwargs.get('period', "PT10S") - self.success_threshold = kwargs.get('success_threshold', 1) - self.timeout = kwargs.get('timeout', "PT2S") - - -class ProgressMetrics(msrest.serialization.Model): - """Progress metrics definition. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar completed_datapoint_count: The completed datapoint count. - :vartype completed_datapoint_count: long - :ivar incremental_data_last_refresh_date_time: The time of last successful incremental data - refresh in UTC. - :vartype incremental_data_last_refresh_date_time: ~datetime.datetime - :ivar skipped_datapoint_count: The skipped datapoint count. - :vartype skipped_datapoint_count: long - :ivar total_datapoint_count: The total datapoint count. - :vartype total_datapoint_count: long - """ - - _validation = { - 'completed_datapoint_count': {'readonly': True}, - 'incremental_data_last_refresh_date_time': {'readonly': True}, - 'skipped_datapoint_count': {'readonly': True}, - 'total_datapoint_count': {'readonly': True}, - } - - _attribute_map = { - 'completed_datapoint_count': {'key': 'completedDatapointCount', 'type': 'long'}, - 'incremental_data_last_refresh_date_time': {'key': 'incrementalDataLastRefreshDateTime', 'type': 'iso-8601'}, - 'skipped_datapoint_count': {'key': 'skippedDatapointCount', 'type': 'long'}, - 'total_datapoint_count': {'key': 'totalDatapointCount', 'type': 'long'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ProgressMetrics, self).__init__(**kwargs) - self.completed_datapoint_count = None - self.incremental_data_last_refresh_date_time = None - self.skipped_datapoint_count = None - self.total_datapoint_count = None - - -class PyTorch(DistributionConfiguration): - """PyTorch distribution configuration. - - All required parameters must be populated in order to send to Azure. - - :ivar distribution_type: Required. [Required] Specifies the type of distribution - framework.Constant filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi", - "Ray". - :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType - :ivar process_count_per_instance: Number of processes per node. - :vartype process_count_per_instance: int - """ - - _validation = { - 'distribution_type': {'required': True}, - } - - _attribute_map = { - 'distribution_type': {'key': 'distributionType', 'type': 'str'}, - 'process_count_per_instance': {'key': 'processCountPerInstance', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword process_count_per_instance: Number of processes per node. - :paramtype process_count_per_instance: int - """ - super(PyTorch, self).__init__(**kwargs) - self.distribution_type = 'PyTorch' # type: str - self.process_count_per_instance = kwargs.get('process_count_per_instance', None) - - -class QueueSettings(msrest.serialization.Model): - """QueueSettings. 
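# Editor's illustrative sketch (not part of the generated patch): ProbeSettings
# spelled out with the defaults documented in its __init__ above; initial_delay has
# no documented default, so its value here is hypothetical. Durations are ISO 8601
# strings, matching the defaults the generated model itself uses.
from azure.ai.ml._restclient.v2023_08_01_preview import models

probe = models.ProbeSettings(
    failure_threshold=30,   # default when omitted
    period="PT10S",         # default interval between probes
    success_threshold=1,    # default
    timeout="PT2S",         # default probe timeout
    initial_delay="PT10S",  # hypothetical; no default is documented
)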
- - :ivar job_tier: Controls the compute job tier. Possible values include: "Null", "Spot", - "Basic", "Standard", "Premium". - :vartype job_tier: str or ~azure.mgmt.machinelearningservices.models.JobTier - :ivar priority: Controls the priority of the job on a compute. - :vartype priority: int - """ - - _attribute_map = { - 'job_tier': {'key': 'jobTier', 'type': 'str'}, - 'priority': {'key': 'priority', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword job_tier: Controls the compute job tier. Possible values include: "Null", "Spot", - "Basic", "Standard", "Premium". - :paramtype job_tier: str or ~azure.mgmt.machinelearningservices.models.JobTier - :keyword priority: Controls the priority of the job on a compute. - :paramtype priority: int - """ - super(QueueSettings, self).__init__(**kwargs) - self.job_tier = kwargs.get('job_tier', None) - self.priority = kwargs.get('priority', None) - - -class QuotaBaseProperties(msrest.serialization.Model): - """The properties for Quota update or retrieval. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar type: Specifies the resource type. - :vartype type: str - :ivar limit: The maximum permitted quota of the resource. - :vartype limit: long - :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". - :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword id: Specifies the resource ID. - :paramtype id: str - :keyword type: Specifies the resource type. - :paramtype type: str - :keyword limit: The maximum permitted quota of the resource. - :paramtype limit: long - :keyword unit: An enum describing the unit of quota measurement. Possible values include: - "Count". - :paramtype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit - """ - super(QuotaBaseProperties, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.type = kwargs.get('type', None) - self.limit = kwargs.get('limit', None) - self.unit = kwargs.get('unit', None) - - -class QuotaUpdateParameters(msrest.serialization.Model): - """Quota update parameters. - - :ivar value: The list for update quota. - :vartype value: list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties] - :ivar location: Region of workspace quota to be updated. - :vartype location: str - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'}, - 'location': {'key': 'location', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword value: The list for update quota. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties] - :keyword location: Region of workspace quota to be updated. - :paramtype location: str - """ - super(QuotaUpdateParameters, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - self.location = kwargs.get('location', None) - - -class RandomSamplingAlgorithm(SamplingAlgorithm): - """Defines a Sampling Algorithm that generates values randomly. - - All required parameters must be populated in order to send to Azure. - - :ivar sampling_algorithm_type: Required. [Required] The algorithm used for generating - hyperparameter values, along with configuration properties.Constant filled by server. 
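Every model removed above follows the same msrest pattern: _attribute_map ties a Python attribute to its REST JSON key and serialization type, and __init__(self, **kwargs) reads optional values with kwargs.get() defaults (ProbeSettings, for instance, defaults failure_threshold to 30 and period to "PT10S"). The following is a minimal, hypothetical sketch of that mapping, not the SDK's own Serializer; it reuses the jobTier/priority keys from QueueSettings above, and the helper name to_rest_dict is an assumption.

# Hypothetical sketch of the kwargs/_attribute_map pattern used by the removed models.
class QueueSettingsSketch:
    # Maps Python attribute name -> REST JSON key (keys copied from the removed QueueSettings).
    _attribute_map = {
        "job_tier": {"key": "jobTier", "type": "str"},
        "priority": {"key": "priority", "type": "int"},
    }

    def __init__(self, **kwargs):
        # Same pattern as the removed models: optional fields default to None.
        self.job_tier = kwargs.get("job_tier", None)
        self.priority = kwargs.get("priority", None)


def to_rest_dict(model):
    """Serialize a sketch model to its REST wire keys, dropping unset fields."""
    body = {}
    for attr, meta in model._attribute_map.items():
        value = getattr(model, attr)
        if value is not None:
            body[meta["key"]] = value
    return body


settings = QueueSettingsSketch(job_tier="Standard", priority=1)
assert to_rest_dict(settings) == {"jobTier": "Standard", "priority": 1}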
Possible - values include: "Grid", "Random", "Bayesian". - :vartype sampling_algorithm_type: str or - ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType - :ivar logbase: An optional positive number or e in string format to be used as base for log - based random sampling. - :vartype logbase: str - :ivar rule: The specific type of random algorithm. Possible values include: "Random", "Sobol". - :vartype rule: str or ~azure.mgmt.machinelearningservices.models.RandomSamplingAlgorithmRule - :ivar seed: An optional integer to use as the seed for random number generation. - :vartype seed: int - """ - - _validation = { - 'sampling_algorithm_type': {'required': True}, - } - - _attribute_map = { - 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, - 'logbase': {'key': 'logbase', 'type': 'str'}, - 'rule': {'key': 'rule', 'type': 'str'}, - 'seed': {'key': 'seed', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword logbase: An optional positive number or e in string format to be used as base for log - based random sampling. - :paramtype logbase: str - :keyword rule: The specific type of random algorithm. Possible values include: "Random", - "Sobol". - :paramtype rule: str or ~azure.mgmt.machinelearningservices.models.RandomSamplingAlgorithmRule - :keyword seed: An optional integer to use as the seed for random number generation. - :paramtype seed: int - """ - super(RandomSamplingAlgorithm, self).__init__(**kwargs) - self.sampling_algorithm_type = 'Random' # type: str - self.logbase = kwargs.get('logbase', None) - self.rule = kwargs.get('rule', None) - self.seed = kwargs.get('seed', None) - - -class Ray(DistributionConfiguration): - """Ray distribution configuration. - - All required parameters must be populated in order to send to Azure. - - :ivar distribution_type: Required. [Required] Specifies the type of distribution - framework.Constant filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi", - "Ray". - :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType - :ivar address: The address of Ray head node. - :vartype address: str - :ivar dashboard_port: The port to bind the dashboard server to. - :vartype dashboard_port: int - :ivar head_node_additional_args: Additional arguments passed to ray start in head node. - :vartype head_node_additional_args: str - :ivar include_dashboard: Provide this argument to start the Ray dashboard GUI. - :vartype include_dashboard: bool - :ivar port: The port of the head ray process. - :vartype port: int - :ivar worker_node_additional_args: Additional arguments passed to ray start in worker node. - :vartype worker_node_additional_args: str - """ - - _validation = { - 'distribution_type': {'required': True}, - } - - _attribute_map = { - 'distribution_type': {'key': 'distributionType', 'type': 'str'}, - 'address': {'key': 'address', 'type': 'str'}, - 'dashboard_port': {'key': 'dashboardPort', 'type': 'int'}, - 'head_node_additional_args': {'key': 'headNodeAdditionalArgs', 'type': 'str'}, - 'include_dashboard': {'key': 'includeDashboard', 'type': 'bool'}, - 'port': {'key': 'port', 'type': 'int'}, - 'worker_node_additional_args': {'key': 'workerNodeAdditionalArgs', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword address: The address of Ray head node. - :paramtype address: str - :keyword dashboard_port: The port to bind the dashboard server to. 
- :paramtype dashboard_port: int - :keyword head_node_additional_args: Additional arguments passed to ray start in head node. - :paramtype head_node_additional_args: str - :keyword include_dashboard: Provide this argument to start the Ray dashboard GUI. - :paramtype include_dashboard: bool - :keyword port: The port of the head ray process. - :paramtype port: int - :keyword worker_node_additional_args: Additional arguments passed to ray start in worker node. - :paramtype worker_node_additional_args: str - """ - super(Ray, self).__init__(**kwargs) - self.distribution_type = 'Ray' # type: str - self.address = kwargs.get('address', None) - self.dashboard_port = kwargs.get('dashboard_port', None) - self.head_node_additional_args = kwargs.get('head_node_additional_args', None) - self.include_dashboard = kwargs.get('include_dashboard', None) - self.port = kwargs.get('port', None) - self.worker_node_additional_args = kwargs.get('worker_node_additional_args', None) - - -class Recurrence(msrest.serialization.Model): - """The workflow trigger recurrence for ComputeStartStop schedule type. - - :ivar frequency: [Required] The frequency to trigger schedule. Possible values include: - "Minute", "Hour", "Day", "Week", "Month". - :vartype frequency: str or ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency - :ivar interval: [Required] Specifies schedule interval in conjunction with frequency. - :vartype interval: int - :ivar start_time: The start time in yyyy-MM-ddTHH:mm:ss format. - :vartype start_time: str - :ivar time_zone: Specifies time zone in which the schedule runs. - TimeZone should follow Windows time zone format. Refer: - https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. - :vartype time_zone: str - :ivar schedule: [Required] The recurrence schedule. - :vartype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule - """ - - _attribute_map = { - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'interval': {'key': 'interval', 'type': 'int'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword frequency: [Required] The frequency to trigger schedule. Possible values include: - "Minute", "Hour", "Day", "Week", "Month". - :paramtype frequency: str or ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency - :keyword interval: [Required] Specifies schedule interval in conjunction with frequency. - :paramtype interval: int - :keyword start_time: The start time in yyyy-MM-ddTHH:mm:ss format. - :paramtype start_time: str - :keyword time_zone: Specifies time zone in which the schedule runs. - TimeZone should follow Windows time zone format. Refer: - https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. - :paramtype time_zone: str - :keyword schedule: [Required] The recurrence schedule. - :paramtype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule - """ - super(Recurrence, self).__init__(**kwargs) - self.frequency = kwargs.get('frequency', None) - self.interval = kwargs.get('interval', None) - self.start_time = kwargs.get('start_time', None) - self.time_zone = kwargs.get('time_zone', "UTC") - self.schedule = kwargs.get('schedule', None) - - -class RecurrenceSchedule(msrest.serialization.Model): - """RecurrenceSchedule. 
- - All required parameters must be populated in order to send to Azure. - - :ivar hours: Required. [Required] List of hours for the schedule. - :vartype hours: list[int] - :ivar minutes: Required. [Required] List of minutes for the schedule. - :vartype minutes: list[int] - :ivar month_days: List of month days for the schedule. - :vartype month_days: list[int] - :ivar week_days: List of days for the schedule. - :vartype week_days: list[str or ~azure.mgmt.machinelearningservices.models.WeekDay] - """ - - _validation = { - 'hours': {'required': True}, - 'minutes': {'required': True}, - } - - _attribute_map = { - 'hours': {'key': 'hours', 'type': '[int]'}, - 'minutes': {'key': 'minutes', 'type': '[int]'}, - 'month_days': {'key': 'monthDays', 'type': '[int]'}, - 'week_days': {'key': 'weekDays', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword hours: Required. [Required] List of hours for the schedule. - :paramtype hours: list[int] - :keyword minutes: Required. [Required] List of minutes for the schedule. - :paramtype minutes: list[int] - :keyword month_days: List of month days for the schedule. - :paramtype month_days: list[int] - :keyword week_days: List of days for the schedule. - :paramtype week_days: list[str or ~azure.mgmt.machinelearningservices.models.WeekDay] - """ - super(RecurrenceSchedule, self).__init__(**kwargs) - self.hours = kwargs['hours'] - self.minutes = kwargs['minutes'] - self.month_days = kwargs.get('month_days', None) - self.week_days = kwargs.get('week_days', None) - - -class RecurrenceTrigger(TriggerBase): - """RecurrenceTrigger. - - All required parameters must be populated in order to send to Azure. - - :ivar end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer - https://en.wikipedia.org/wiki/ISO_8601. - Recommented format would be "2022-06-01T00:00:01" - If not present, the schedule will run indefinitely. - :vartype end_time: str - :ivar start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC - offset. - :vartype start_time: str - :ivar time_zone: Specifies time zone in which the schedule runs. - TimeZone should follow Windows time zone format. Refer: - https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. - :vartype time_zone: str - :ivar trigger_type: Required. [Required].Constant filled by server. Possible values include: - "Recurrence", "Cron". - :vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType - :ivar frequency: Required. [Required] The frequency to trigger schedule. Possible values - include: "Minute", "Hour", "Day", "Week", "Month". - :vartype frequency: str or ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency - :ivar interval: Required. [Required] Specifies schedule interval in conjunction with frequency. - :vartype interval: int - :ivar schedule: The recurrence schedule. 
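In the recurrence models above, RecurrenceSchedule marks hours and minutes as required in _validation, while RecurrenceTrigger pins its discriminator to 'Recurrence' and requires frequency and interval. Below is a rough sketch of the wire payload those attribute maps describe; the concrete values ("Week", 9:00, day 1) are illustrative only.

# Illustrative JSON body for a recurrence trigger, using the REST keys from the
# removed _attribute_maps (triggerType, frequency, interval, timeZone, schedule,
# hours, minutes, monthDays). The specific values are examples only.
recurrence_trigger_body = {
    "triggerType": "Recurrence",   # discriminator constant set by the RecurrenceTrigger subtype
    "frequency": "Week",           # one of "Minute", "Hour", "Day", "Week", "Month"
    "interval": 1,                 # required together with frequency
    "timeZone": "UTC",             # Windows-format time zone; "UTC" is the default elsewhere in these models
    "schedule": {
        "hours": [9],              # required per RecurrenceSchedule._validation
        "minutes": [0],            # required per RecurrenceSchedule._validation
        "monthDays": [1],          # optional
    },
}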
- :vartype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule - """ - - _validation = { - 'trigger_type': {'required': True}, - 'frequency': {'required': True}, - 'interval': {'required': True}, - } - - _attribute_map = { - 'end_time': {'key': 'endTime', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'interval': {'key': 'interval', 'type': 'int'}, - 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer - https://en.wikipedia.org/wiki/ISO_8601. - Recommented format would be "2022-06-01T00:00:01" - If not present, the schedule will run indefinitely. - :paramtype end_time: str - :keyword start_time: Specifies start time of schedule in ISO 8601 format, but without a UTC - offset. - :paramtype start_time: str - :keyword time_zone: Specifies time zone in which the schedule runs. - TimeZone should follow Windows time zone format. Refer: - https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. - :paramtype time_zone: str - :keyword frequency: Required. [Required] The frequency to trigger schedule. Possible values - include: "Minute", "Hour", "Day", "Week", "Month". - :paramtype frequency: str or ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency - :keyword interval: Required. [Required] Specifies schedule interval in conjunction with - frequency. - :paramtype interval: int - :keyword schedule: The recurrence schedule. - :paramtype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule - """ - super(RecurrenceTrigger, self).__init__(**kwargs) - self.trigger_type = 'Recurrence' # type: str - self.frequency = kwargs['frequency'] - self.interval = kwargs['interval'] - self.schedule = kwargs.get('schedule', None) - - -class RegenerateEndpointKeysRequest(msrest.serialization.Model): - """RegenerateEndpointKeysRequest. - - All required parameters must be populated in order to send to Azure. - - :ivar key_type: Required. [Required] Specification for which type of key to generate. Primary - or Secondary. Possible values include: "Primary", "Secondary". - :vartype key_type: str or ~azure.mgmt.machinelearningservices.models.KeyType - :ivar key_value: The value the key is set to. - :vartype key_value: str - """ - - _validation = { - 'key_type': {'required': True}, - } - - _attribute_map = { - 'key_type': {'key': 'keyType', 'type': 'str'}, - 'key_value': {'key': 'keyValue', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword key_type: Required. [Required] Specification for which type of key to generate. - Primary or Secondary. Possible values include: "Primary", "Secondary". - :paramtype key_type: str or ~azure.mgmt.machinelearningservices.models.KeyType - :keyword key_value: The value the key is set to. - :paramtype key_value: str - """ - super(RegenerateEndpointKeysRequest, self).__init__(**kwargs) - self.key_type = kwargs['key_type'] - self.key_value = kwargs.get('key_value', None) - - -class Registry(TrackedResource): - """Registry. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. 
- - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: A set of tags. Resource tags. - :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. - :vartype location: str - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :vartype kind: str - :ivar sku: Sku details required for ARM contract for Autoscaling. - :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar discovery_url: Discovery URL for the Registry. - :vartype discovery_url: str - :ivar intellectual_property_publisher: IntellectualPropertyPublisher for the registry. - :vartype intellectual_property_publisher: str - :ivar managed_resource_group: ResourceId of the managed RG if the registry has system created - resources. - :vartype managed_resource_group: ~azure.mgmt.machinelearningservices.models.ArmResourceId - :ivar ml_flow_registry_uri: MLFlow Registry URI for the Registry. - :vartype ml_flow_registry_uri: str - :ivar registry_private_endpoint_connections: Private endpoint connections info used for pending - connections in private link portal. - :vartype registry_private_endpoint_connections: - list[~azure.mgmt.machinelearningservices.models.RegistryPrivateEndpointConnection] - :ivar public_network_access: Is the Registry accessible from the internet? - Possible values: "Enabled" or "Disabled". - :vartype public_network_access: str - :ivar region_details: Details of each region the registry is in. 
- :vartype region_details: - list[~azure.mgmt.machinelearningservices.models.RegistryRegionArmDetails] - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'}, - 'intellectual_property_publisher': {'key': 'properties.intellectualPropertyPublisher', 'type': 'str'}, - 'managed_resource_group': {'key': 'properties.managedResourceGroup', 'type': 'ArmResourceId'}, - 'ml_flow_registry_uri': {'key': 'properties.mlFlowRegistryUri', 'type': 'str'}, - 'registry_private_endpoint_connections': {'key': 'properties.registryPrivateEndpointConnections', 'type': '[RegistryPrivateEndpointConnection]'}, - 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, - 'region_details': {'key': 'properties.regionDetails', 'type': '[RegistryRegionArmDetails]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword tags: A set of tags. Resource tags. - :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. - :paramtype location: str - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :paramtype kind: str - :keyword sku: Sku details required for ARM contract for Autoscaling. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - :keyword discovery_url: Discovery URL for the Registry. - :paramtype discovery_url: str - :keyword intellectual_property_publisher: IntellectualPropertyPublisher for the registry. - :paramtype intellectual_property_publisher: str - :keyword managed_resource_group: ResourceId of the managed RG if the registry has system - created resources. - :paramtype managed_resource_group: ~azure.mgmt.machinelearningservices.models.ArmResourceId - :keyword ml_flow_registry_uri: MLFlow Registry URI for the Registry. - :paramtype ml_flow_registry_uri: str - :keyword registry_private_endpoint_connections: Private endpoint connections info used for - pending connections in private link portal. - :paramtype registry_private_endpoint_connections: - list[~azure.mgmt.machinelearningservices.models.RegistryPrivateEndpointConnection] - :keyword public_network_access: Is the Registry accessible from the internet? - Possible values: "Enabled" or "Disabled". - :paramtype public_network_access: str - :keyword region_details: Details of each region the registry is in. 
- :paramtype region_details: - list[~azure.mgmt.machinelearningservices.models.RegistryRegionArmDetails] - """ - super(Registry, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.sku = kwargs.get('sku', None) - self.discovery_url = kwargs.get('discovery_url', None) - self.intellectual_property_publisher = kwargs.get('intellectual_property_publisher', None) - self.managed_resource_group = kwargs.get('managed_resource_group', None) - self.ml_flow_registry_uri = kwargs.get('ml_flow_registry_uri', None) - self.registry_private_endpoint_connections = kwargs.get('registry_private_endpoint_connections', None) - self.public_network_access = kwargs.get('public_network_access', None) - self.region_details = kwargs.get('region_details', None) - - -class RegistryListCredentialsResult(msrest.serialization.Model): - """RegistryListCredentialsResult. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar location: The location of the workspace ACR. - :vartype location: str - :ivar passwords: - :vartype passwords: list[~azure.mgmt.machinelearningservices.models.Password] - :ivar username: The username of the workspace ACR. - :vartype username: str - """ - - _validation = { - 'location': {'readonly': True}, - 'username': {'readonly': True}, - } - - _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'passwords': {'key': 'passwords', 'type': '[Password]'}, - 'username': {'key': 'username', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword passwords: - :paramtype passwords: list[~azure.mgmt.machinelearningservices.models.Password] - """ - super(RegistryListCredentialsResult, self).__init__(**kwargs) - self.location = None - self.passwords = kwargs.get('passwords', None) - self.username = None - - -class RegistryPartialManagedServiceIdentity(ManagedServiceIdentity): - """Managed service identity (system assigned and/or user assigned identities). - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar principal_id: The service principal ID of the system assigned identity. This property - will only be provided for a system assigned identity. - :vartype principal_id: str - :ivar tenant_id: The tenant ID of the system assigned identity. This property will only be - provided for a system assigned identity. - :vartype tenant_id: str - :ivar type: Required. Type of managed service identity (where both SystemAssigned and - UserAssigned types are allowed). Possible values include: "None", "SystemAssigned", - "UserAssigned", "SystemAssigned,UserAssigned". - :vartype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType - :ivar user_assigned_identities: The set of user assigned identities associated with the - resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. - The dictionary values can be empty objects ({}) in requests. 
- :vartype user_assigned_identities: dict[str, - ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] - """ - - _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'type': {'required': True}, - } - - _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword type: Required. Type of managed service identity (where both SystemAssigned and - UserAssigned types are allowed). Possible values include: "None", "SystemAssigned", - "UserAssigned", "SystemAssigned,UserAssigned". - :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType - :keyword user_assigned_identities: The set of user assigned identities associated with the - resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: - '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. - The dictionary values can be empty objects ({}) in requests. - :paramtype user_assigned_identities: dict[str, - ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] - """ - super(RegistryPartialManagedServiceIdentity, self).__init__(**kwargs) - - -class RegistryPrivateEndpointConnection(msrest.serialization.Model): - """Private endpoint connection definition. - - :ivar id: This is the private endpoint connection name created on SRP - Full resource id: - /subscriptions/{subId}/resourceGroups/{rgName}/providers/Microsoft.MachineLearningServices/{resourceType}/{resourceName}/registryPrivateEndpointConnections/{peConnectionName}. - :vartype id: str - :ivar location: Same as workspace location. - :vartype location: str - :ivar group_ids: The group ids. - :vartype group_ids: list[str] - :ivar private_endpoint: The PE network resource that is linked to this PE connection. - :vartype private_endpoint: ~azure.mgmt.machinelearningservices.models.PrivateEndpointResource - :ivar registry_private_link_service_connection_state: The connection state. - :vartype registry_private_link_service_connection_state: - ~azure.mgmt.machinelearningservices.models.RegistryPrivateLinkServiceConnectionState - :ivar provisioning_state: One of null, "Succeeded", "Provisioning", "Failed". While not - approved, it's null. - :vartype provisioning_state: str - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'group_ids': {'key': 'properties.groupIds', 'type': '[str]'}, - 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpointResource'}, - 'registry_private_link_service_connection_state': {'key': 'properties.registryPrivateLinkServiceConnectionState', 'type': 'RegistryPrivateLinkServiceConnectionState'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword id: This is the private endpoint connection name created on SRP - Full resource id: - /subscriptions/{subId}/resourceGroups/{rgName}/providers/Microsoft.MachineLearningServices/{resourceType}/{resourceName}/registryPrivateEndpointConnections/{peConnectionName}. - :paramtype id: str - :keyword location: Same as workspace location. 
- :paramtype location: str - :keyword group_ids: The group ids. - :paramtype group_ids: list[str] - :keyword private_endpoint: The PE network resource that is linked to this PE connection. - :paramtype private_endpoint: ~azure.mgmt.machinelearningservices.models.PrivateEndpointResource - :keyword registry_private_link_service_connection_state: The connection state. - :paramtype registry_private_link_service_connection_state: - ~azure.mgmt.machinelearningservices.models.RegistryPrivateLinkServiceConnectionState - :keyword provisioning_state: One of null, "Succeeded", "Provisioning", "Failed". While not - approved, it's null. - :paramtype provisioning_state: str - """ - super(RegistryPrivateEndpointConnection, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.location = kwargs.get('location', None) - self.group_ids = kwargs.get('group_ids', None) - self.private_endpoint = kwargs.get('private_endpoint', None) - self.registry_private_link_service_connection_state = kwargs.get('registry_private_link_service_connection_state', None) - self.provisioning_state = kwargs.get('provisioning_state', None) - - -class RegistryPrivateLinkServiceConnectionState(msrest.serialization.Model): - """The connection state. - - :ivar actions_required: Some RP chose "None". Other RPs use this for region expansion. - :vartype actions_required: str - :ivar description: User-defined message that, per NRP doc, may be used for approval-related - message. - :vartype description: str - :ivar status: Connection status of the service consumer with the service provider. Possible - values include: "Approved", "Pending", "Rejected", "Disconnected", "Timeout". - :vartype status: str or - ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus - """ - - _attribute_map = { - 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword actions_required: Some RP chose "None". Other RPs use this for region expansion. - :paramtype actions_required: str - :keyword description: User-defined message that, per NRP doc, may be used for approval-related - message. - :paramtype description: str - :keyword status: Connection status of the service consumer with the service provider. Possible - values include: "Approved", "Pending", "Rejected", "Disconnected", "Timeout". - :paramtype status: str or - ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus - """ - super(RegistryPrivateLinkServiceConnectionState, self).__init__(**kwargs) - self.actions_required = kwargs.get('actions_required', None) - self.description = kwargs.get('description', None) - self.status = kwargs.get('status', None) - - -class RegistryRegionArmDetails(msrest.serialization.Model): - """Details for each region the registry is in. - - :ivar acr_details: List of ACR accounts. - :vartype acr_details: list[~azure.mgmt.machinelearningservices.models.AcrDetails] - :ivar location: The location where the registry exists. - :vartype location: str - :ivar storage_account_details: List of storage accounts. 
- :vartype storage_account_details: - list[~azure.mgmt.machinelearningservices.models.StorageAccountDetails] - """ - - _attribute_map = { - 'acr_details': {'key': 'acrDetails', 'type': '[AcrDetails]'}, - 'location': {'key': 'location', 'type': 'str'}, - 'storage_account_details': {'key': 'storageAccountDetails', 'type': '[StorageAccountDetails]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword acr_details: List of ACR accounts. - :paramtype acr_details: list[~azure.mgmt.machinelearningservices.models.AcrDetails] - :keyword location: The location where the registry exists. - :paramtype location: str - :keyword storage_account_details: List of storage accounts. - :paramtype storage_account_details: - list[~azure.mgmt.machinelearningservices.models.StorageAccountDetails] - """ - super(RegistryRegionArmDetails, self).__init__(**kwargs) - self.acr_details = kwargs.get('acr_details', None) - self.location = kwargs.get('location', None) - self.storage_account_details = kwargs.get('storage_account_details', None) - - -class RegistryTrackedResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of Registry entities. - - :ivar next_link: The link to the next page of Registry objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type Registry. - :vartype value: list[~azure.mgmt.machinelearningservices.models.Registry] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Registry]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of Registry objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type Registry. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.Registry] - """ - super(RegistryTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class Regression(AutoMLVertical, TableVertical): - """Regression task in AutoML Table vertical. - - All required parameters must be populated in order to send to Azure. - - :ivar cv_split_column_names: Columns to use for CVSplit data. - :vartype cv_split_column_names: list[str] - :ivar featurization_settings: Featurization inputs needed for AutoML job. - :vartype featurization_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings - :ivar fixed_parameters: Model/training parameters that will remain constant throughout - training. - :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters - :ivar limit_settings: Execution constraints for AutoMLJob. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings - :ivar n_cross_validations: Number of cross validation folds to be applied on training dataset - when validation dataset is not provided. - :vartype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] - :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings - :ivar test_data: Test data input. 
- :vartype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar test_data_size: The fraction of test dataset that needs to be set aside for validation - purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype test_data_size: float - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :vartype validation_data_size: float - :ivar weight_column_name: The name of the sample weight column. Automated ML supports a - weighted column as an input, causing rows in the data to be weighted up or down. - :vartype weight_column_name: str - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric for regression task. Possible values include: - "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", - "NormalizedMeanAbsoluteError". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.RegressionPrimaryMetrics - :ivar training_settings: Inputs for training phase for an AutoML Job. 
- :vartype training_settings: - ~azure.mgmt.machinelearningservices.models.RegressionTrainingSettings - """ - - _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, - } - - _attribute_map = { - 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'TableFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, - 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, - 'search_space': {'key': 'searchSpace', 'type': '[TableParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'TableSweepSettings'}, - 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, - 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - 'training_settings': {'key': 'trainingSettings', 'type': 'RegressionTrainingSettings'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword cv_split_column_names: Columns to use for CVSplit data. - :paramtype cv_split_column_names: list[str] - :keyword featurization_settings: Featurization inputs needed for AutoML job. - :paramtype featurization_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalFeaturizationSettings - :keyword fixed_parameters: Model/training parameters that will remain constant throughout - training. - :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.TableFixedParameters - :keyword limit_settings: Execution constraints for AutoMLJob. - :paramtype limit_settings: - ~azure.mgmt.machinelearningservices.models.TableVerticalLimitSettings - :keyword n_cross_validations: Number of cross validation folds to be applied on training - dataset - when validation dataset is not provided. - :paramtype n_cross_validations: ~azure.mgmt.machinelearningservices.models.NCrossValidations - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: - list[~azure.mgmt.machinelearningservices.models.TableParameterSubspace] - :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.TableSweepSettings - :keyword test_data: Test data input. - :paramtype test_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword test_data_size: The fraction of test dataset that needs to be set aside for validation - purpose. - Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype test_data_size: float - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword validation_data_size: The fraction of training dataset that needs to be set aside for - validation purpose. 
- Values between (0.0 , 1.0) - Applied when validation dataset is not provided. - :paramtype validation_data_size: float - :keyword weight_column_name: The name of the sample weight column. Automated ML supports a - weighted column as an input, causing rows in the data to be weighted up or down. - :paramtype weight_column_name: str - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword primary_metric: Primary metric for regression task. Possible values include: - "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", - "NormalizedMeanAbsoluteError". - :paramtype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.RegressionPrimaryMetrics - :keyword training_settings: Inputs for training phase for an AutoML Job. - :paramtype training_settings: - ~azure.mgmt.machinelearningservices.models.RegressionTrainingSettings - """ - super(Regression, self).__init__(**kwargs) - self.cv_split_column_names = kwargs.get('cv_split_column_names', None) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.n_cross_validations = kwargs.get('n_cross_validations', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.test_data = kwargs.get('test_data', None) - self.test_data_size = kwargs.get('test_data_size', None) - self.validation_data = kwargs.get('validation_data', None) - self.validation_data_size = kwargs.get('validation_data_size', None) - self.weight_column_name = kwargs.get('weight_column_name', None) - self.task_type = 'Regression' # type: str - self.primary_metric = kwargs.get('primary_metric', None) - self.training_settings = kwargs.get('training_settings', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] - - -class RegressionModelPerformanceMetricThreshold(ModelPerformanceMetricThresholdBase): - """RegressionModelPerformanceMetricThreshold. - - All required parameters must be populated in order to send to Azure. - - :ivar model_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Classification", "Regression". - :vartype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType - :ivar threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. [Required] The regression model performance metric to calculate. - Possible values include: "MeanAbsoluteError", "RootMeanSquaredError", "MeanSquaredError". 
- :vartype metric: str or - ~azure.mgmt.machinelearningservices.models.RegressionModelPerformanceMetric - """ - - _validation = { - 'model_type': {'required': True}, - 'metric': {'required': True}, - } - - _attribute_map = { - 'model_type': {'key': 'modelType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword threshold: The threshold value. If null, a default value will be set depending on the - selected metric. - :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. [Required] The regression model performance metric to calculate. - Possible values include: "MeanAbsoluteError", "RootMeanSquaredError", "MeanSquaredError". - :paramtype metric: str or - ~azure.mgmt.machinelearningservices.models.RegressionModelPerformanceMetric - """ - super(RegressionModelPerformanceMetricThreshold, self).__init__(**kwargs) - self.model_type = 'Regression' # type: str - self.metric = kwargs['metric'] - - -class RegressionTrainingSettings(TrainingSettings): - """Regression Training related configuration. - - :ivar enable_dnn_training: Enable recommendation of DNN models. - :vartype enable_dnn_training: bool - :ivar enable_model_explainability: Flag to turn on explainability on best model. - :vartype enable_model_explainability: bool - :ivar enable_onnx_compatible_models: Flag for enabling onnx compatible models. - :vartype enable_onnx_compatible_models: bool - :ivar enable_stack_ensemble: Enable stack ensemble run. - :vartype enable_stack_ensemble: bool - :ivar enable_vote_ensemble: Enable voting ensemble run. - :vartype enable_vote_ensemble: bool - :ivar ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model - generation, multiple fitted models from the previous child runs are downloaded. - Configure this parameter with a higher value than 300 secs, if more time is needed. - :vartype ensemble_model_download_timeout: ~datetime.timedelta - :ivar stack_ensemble_settings: Stack ensemble settings for stack ensemble run. - :vartype stack_ensemble_settings: - ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings - :ivar training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to - 'non-distributed' for now, however in the future may result in mixed mode or heuristics based - mode selection. Default is 'auto'. - If 'Distributed' then only distributed featurization is used and distributed algorithms are - chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". - :vartype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode - :ivar allowed_training_algorithms: Allowed models for regression task. - :vartype allowed_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.RegressionModels] - :ivar blocked_training_algorithms: Blocked models for regression task. 
- :vartype blocked_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.RegressionModels] - """ - - _attribute_map = { - 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, - 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, - 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, - 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, - 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, - 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, - 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, - 'training_mode': {'key': 'trainingMode', 'type': 'str'}, - 'allowed_training_algorithms': {'key': 'allowedTrainingAlgorithms', 'type': '[str]'}, - 'blocked_training_algorithms': {'key': 'blockedTrainingAlgorithms', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword enable_dnn_training: Enable recommendation of DNN models. - :paramtype enable_dnn_training: bool - :keyword enable_model_explainability: Flag to turn on explainability on best model. - :paramtype enable_model_explainability: bool - :keyword enable_onnx_compatible_models: Flag for enabling onnx compatible models. - :paramtype enable_onnx_compatible_models: bool - :keyword enable_stack_ensemble: Enable stack ensemble run. - :paramtype enable_stack_ensemble: bool - :keyword enable_vote_ensemble: Enable voting ensemble run. - :paramtype enable_vote_ensemble: bool - :keyword ensemble_model_download_timeout: During VotingEnsemble and StackEnsemble model - generation, multiple fitted models from the previous child runs are downloaded. - Configure this parameter with a higher value than 300 secs, if more time is needed. - :paramtype ensemble_model_download_timeout: ~datetime.timedelta - :keyword stack_ensemble_settings: Stack ensemble settings for stack ensemble run. - :paramtype stack_ensemble_settings: - ~azure.mgmt.machinelearningservices.models.StackEnsembleSettings - :keyword training_mode: TrainingMode mode - Setting to 'auto' is same as setting it to - 'non-distributed' for now, however in the future may result in mixed mode or heuristics based - mode selection. Default is 'auto'. - If 'Distributed' then only distributed featurization is used and distributed algorithms are - chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". - :paramtype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode - :keyword allowed_training_algorithms: Allowed models for regression task. - :paramtype allowed_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.RegressionModels] - :keyword blocked_training_algorithms: Blocked models for regression task. - :paramtype blocked_training_algorithms: list[str or - ~azure.mgmt.machinelearningservices.models.RegressionModels] - """ - super(RegressionTrainingSettings, self).__init__(**kwargs) - self.allowed_training_algorithms = kwargs.get('allowed_training_algorithms', None) - self.blocked_training_algorithms = kwargs.get('blocked_training_algorithms', None) - - -class RequestLogging(msrest.serialization.Model): - """RequestLogging. - - :ivar capture_headers: For payload logging, we only collect payload by default. 
If customers - also want to collect the specified headers, they can set them in captureHeaders so that backend - will collect those headers along with payload. - :vartype capture_headers: list[str] - """ - - _attribute_map = { - 'capture_headers': {'key': 'captureHeaders', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword capture_headers: For payload logging, we only collect payload by default. If customers - also want to collect the specified headers, they can set them in captureHeaders so that backend - will collect those headers along with payload. - :paramtype capture_headers: list[str] - """ - super(RequestLogging, self).__init__(**kwargs) - self.capture_headers = kwargs.get('capture_headers', None) - - -class ResizeSchema(msrest.serialization.Model): - """Schema for Compute Instance resize. - - :ivar target_vm_size: The name of the virtual machine size. - :vartype target_vm_size: str - """ - - _attribute_map = { - 'target_vm_size': {'key': 'targetVMSize', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword target_vm_size: The name of the virtual machine size. - :paramtype target_vm_size: str - """ - super(ResizeSchema, self).__init__(**kwargs) - self.target_vm_size = kwargs.get('target_vm_size', None) - - -class ResourceId(msrest.serialization.Model): - """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Required. The ID of the resource. - :vartype id: str - """ - - _validation = { - 'id': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword id: Required. The ID of the resource. - :paramtype id: str - """ - super(ResourceId, self).__init__(**kwargs) - self.id = kwargs['id'] - - -class ResourceName(msrest.serialization.Model): - """The Resource Name. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The name of the resource. - :vartype value: str - :ivar localized_value: The localized name of the resource. - :vartype localized_value: str - """ - - _validation = { - 'value': {'readonly': True}, - 'localized_value': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'localized_value': {'key': 'localizedValue', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ResourceName, self).__init__(**kwargs) - self.value = None - self.localized_value = None - - -class ResourceQuota(msrest.serialization.Model): - """The quota assigned to a resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar aml_workspace_location: Region of the AML workspace in the id. - :vartype aml_workspace_location: str - :ivar type: Specifies the resource type. - :vartype type: str - :ivar name: Name of the resource. - :vartype name: ~azure.mgmt.machinelearningservices.models.ResourceName - :ivar limit: The maximum permitted quota of the resource. - :vartype limit: long - :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". 
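The removed Route model shows how required fields are declared: _validation marks path (non-empty, pattern-constrained) and port as required, and __init__ reads them with kwargs['...'] so a missing value fails immediately. Below is a small hypothetical pre-flight check in the same spirit; the helper name check_required is an assumption, not SDK API.

# Hypothetical pre-flight check mirroring the _validation maps in the removed models.
ROUTE_VALIDATION = {
    "path": {"required": True, "min_length": 1},
    "port": {"required": True},
}


def check_required(payload, validation):
    """Raise ValueError if a field marked required is missing or too short."""
    for field, rules in validation.items():
        if rules.get("required") and field not in payload:
            raise ValueError(f"missing required field: {field}")
        if "min_length" in rules and len(payload.get(field, "")) < rules["min_length"]:
            raise ValueError(f"field too short: {field}")
    return payload


check_required({"path": "/score", "port": 8080}, ROUTE_VALIDATION)  # passes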
- :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit - """ - - _validation = { - 'id': {'readonly': True}, - 'aml_workspace_location': {'readonly': True}, - 'type': {'readonly': True}, - 'name': {'readonly': True}, - 'limit': {'readonly': True}, - 'unit': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'ResourceName'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(ResourceQuota, self).__init__(**kwargs) - self.id = None - self.aml_workspace_location = None - self.type = None - self.name = None - self.limit = None - self.unit = None - - -class Route(msrest.serialization.Model): - """Route. - - All required parameters must be populated in order to send to Azure. - - :ivar path: Required. [Required] The path for the route. - :vartype path: str - :ivar port: Required. [Required] The port for the route. - :vartype port: int - """ - - _validation = { - 'path': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'port': {'required': True}, - } - - _attribute_map = { - 'path': {'key': 'path', 'type': 'str'}, - 'port': {'key': 'port', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword path: Required. [Required] The path for the route. - :paramtype path: str - :keyword port: Required. [Required] The port for the route. - :paramtype port: int - """ - super(Route, self).__init__(**kwargs) - self.path = kwargs['path'] - self.port = kwargs['port'] - - -class SASAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): - """SASAuthTypeWorkspaceConnectionProperties. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar expiry_time: - :vartype expiry_time: ~datetime.datetime - :ivar metadata: Any object. - :vartype metadata: any - :ivar target: - :vartype target: str - :ivar credentials: - :vartype credentials: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionSharedAccessSignature - """ - - _validation = { - 'auth_type': {'required': True}, - } - - _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionSharedAccessSignature'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: Category of the connection. 
Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :keyword expiry_time: - :paramtype expiry_time: ~datetime.datetime - :keyword metadata: Any object. - :paramtype metadata: any - :keyword target: - :paramtype target: str - :keyword credentials: - :paramtype credentials: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionSharedAccessSignature - """ - super(SASAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'SAS' # type: str - self.credentials = kwargs.get('credentials', None) - - -class SASCredentialDto(PendingUploadCredentialDto): - """SASCredentialDto. - - All required parameters must be populated in order to send to Azure. - - :ivar credential_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "SAS". - :vartype credential_type: str or - ~azure.mgmt.machinelearningservices.models.PendingUploadCredentialType - :ivar sas_uri: Full SAS Uri, including the storage, container/blob path and SAS token. - :vartype sas_uri: str - """ - - _validation = { - 'credential_type': {'required': True}, - } - - _attribute_map = { - 'credential_type': {'key': 'credentialType', 'type': 'str'}, - 'sas_uri': {'key': 'sasUri', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword sas_uri: Full SAS Uri, including the storage, container/blob path and SAS token. - :paramtype sas_uri: str - """ - super(SASCredentialDto, self).__init__(**kwargs) - self.credential_type = 'SAS' # type: str - self.sas_uri = kwargs.get('sas_uri', None) - - -class SasDatastoreCredentials(DatastoreCredentials): - """SAS datastore credentials configuration. - - All required parameters must be populated in order to send to Azure. - - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". - :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType - :ivar secrets: Required. [Required] Storage container secrets. - :vartype secrets: ~azure.mgmt.machinelearningservices.models.SasDatastoreSecrets - """ - - _validation = { - 'credentials_type': {'required': True}, - 'secrets': {'required': True}, - } - - _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'SasDatastoreSecrets'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword secrets: Required. [Required] Storage container secrets. - :paramtype secrets: ~azure.mgmt.machinelearningservices.models.SasDatastoreSecrets - """ - super(SasDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'Sas' # type: str - self.secrets = kwargs['secrets'] - - -class SasDatastoreSecrets(DatastoreSecrets): - """Datastore SAS secrets. - - All required parameters must be populated in order to send to Azure. - - :ivar secrets_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. 
Possible values include: "AccountKey", "Certificate", "Sas", - "ServicePrincipal", "KerberosPassword", "KerberosKeytab". - :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType - :ivar sas_token: Storage container SAS token. - :vartype sas_token: str - """ - - _validation = { - 'secrets_type': {'required': True}, - } - - _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'sas_token': {'key': 'sasToken', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword sas_token: Storage container SAS token. - :paramtype sas_token: str - """ - super(SasDatastoreSecrets, self).__init__(**kwargs) - self.secrets_type = 'Sas' # type: str - self.sas_token = kwargs.get('sas_token', None) - - -class ScaleSettings(msrest.serialization.Model): - """scale settings for AML Compute. - - All required parameters must be populated in order to send to Azure. - - :ivar max_node_count: Required. Max number of nodes to use. - :vartype max_node_count: int - :ivar min_node_count: Min number of nodes to use. - :vartype min_node_count: int - :ivar node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. This - string needs to be in the RFC Format. - :vartype node_idle_time_before_scale_down: ~datetime.timedelta - """ - - _validation = { - 'max_node_count': {'required': True}, - } - - _attribute_map = { - 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, - 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, - 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword max_node_count: Required. Max number of nodes to use. - :paramtype max_node_count: int - :keyword min_node_count: Min number of nodes to use. - :paramtype min_node_count: int - :keyword node_idle_time_before_scale_down: Node Idle Time before scaling down amlCompute. This - string needs to be in the RFC Format. - :paramtype node_idle_time_before_scale_down: ~datetime.timedelta - """ - super(ScaleSettings, self).__init__(**kwargs) - self.max_node_count = kwargs['max_node_count'] - self.min_node_count = kwargs.get('min_node_count', 0) - self.node_idle_time_before_scale_down = kwargs.get('node_idle_time_before_scale_down', None) - - -class ScaleSettingsInformation(msrest.serialization.Model): - """Desired scale settings for the amlCompute. - - :ivar scale_settings: scale settings for AML Compute. - :vartype scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings - """ - - _attribute_map = { - 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword scale_settings: scale settings for AML Compute. - :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings - """ - super(ScaleSettingsInformation, self).__init__(**kwargs) - self.scale_settings = kwargs.get('scale_settings', None) - - -class Schedule(Resource): - """Azure Resource Manager resource envelope. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. 
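# Illustrative sketch: ScaleSettings serializes nodeIdleTimeBeforeScaleDown as an
# ISO-8601 duration, so a datetime.timedelta is the natural input; max_node_count
# is required and min_node_count defaults to 0 when omitted. Import path assumed
# from this package's layout.
from datetime import timedelta
from azure.ai.ml._restclient.v2023_08_01_preview import models

scale = models.ScaleSettings(
    max_node_count=4,
    min_node_count=0,
    node_idle_time_before_scale_down=timedelta(minutes=30),
)
desired = models.ScaleSettingsInformation(scale_settings=scale)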
E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.ScheduleProperties - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ScheduleProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.ScheduleProperties - """ - super(Schedule, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class ScheduleBase(msrest.serialization.Model): - """ScheduleBase. - - :ivar id: A system assigned id for the schedule. - :vartype id: str - :ivar provisioning_status: The current deployment state of schedule. Possible values include: - "Completed", "Provisioning", "Failed". - :vartype provisioning_status: str or - ~azure.mgmt.machinelearningservices.models.ScheduleProvisioningState - :ivar status: Is the schedule enabled or disabled?. Possible values include: "Enabled", - "Disabled". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus - """ - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword id: A system assigned id for the schedule. - :paramtype id: str - :keyword provisioning_status: The current deployment state of schedule. Possible values - include: "Completed", "Provisioning", "Failed". - :paramtype provisioning_status: str or - ~azure.mgmt.machinelearningservices.models.ScheduleProvisioningState - :keyword status: Is the schedule enabled or disabled?. Possible values include: "Enabled", - "Disabled". - :paramtype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus - """ - super(ScheduleBase, self).__init__(**kwargs) - self.id = kwargs.get('id', None) - self.provisioning_status = kwargs.get('provisioning_status', None) - self.status = kwargs.get('status', None) - - -class ScheduleProperties(ResourceBase): - """Base definition of a schedule. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar action: Required. [Required] Specifies the action of the schedule. - :vartype action: ~azure.mgmt.machinelearningservices.models.ScheduleActionBase - :ivar display_name: Display name of schedule. 
- :vartype display_name: str - :ivar is_enabled: Is the schedule enabled?. - :vartype is_enabled: bool - :ivar provisioning_state: Provisioning state for the schedule. Possible values include: - "Creating", "Updating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ScheduleProvisioningStatus - :ivar trigger: Required. [Required] Specifies the trigger details. - :vartype trigger: ~azure.mgmt.machinelearningservices.models.TriggerBase - """ - - _validation = { - 'action': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'trigger': {'required': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'action': {'key': 'action', 'type': 'ScheduleActionBase'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'is_enabled': {'key': 'isEnabled', 'type': 'bool'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'trigger': {'key': 'trigger', 'type': 'TriggerBase'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword action: Required. [Required] Specifies the action of the schedule. - :paramtype action: ~azure.mgmt.machinelearningservices.models.ScheduleActionBase - :keyword display_name: Display name of schedule. - :paramtype display_name: str - :keyword is_enabled: Is the schedule enabled?. - :paramtype is_enabled: bool - :keyword trigger: Required. [Required] Specifies the trigger details. - :paramtype trigger: ~azure.mgmt.machinelearningservices.models.TriggerBase - """ - super(ScheduleProperties, self).__init__(**kwargs) - self.action = kwargs['action'] - self.display_name = kwargs.get('display_name', None) - self.is_enabled = kwargs.get('is_enabled', True) - self.provisioning_state = None - self.trigger = kwargs['trigger'] - - -class ScheduleResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of Schedule entities. - - :ivar next_link: The link to the next page of Schedule objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type Schedule. - :vartype value: list[~azure.mgmt.machinelearningservices.models.Schedule] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Schedule]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of Schedule objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type Schedule. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.Schedule] - """ - super(ScheduleResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class ScriptReference(msrest.serialization.Model): - """Script reference. - - :ivar script_source: The storage source of the script: inline, workspace. - :vartype script_source: str - :ivar script_data: The location of scripts in the mounted volume. 
- :vartype script_data: str - :ivar script_arguments: Optional command line arguments passed to the script to run. - :vartype script_arguments: str - :ivar timeout: Optional time period passed to timeout command. - :vartype timeout: str - """ - - _attribute_map = { - 'script_source': {'key': 'scriptSource', 'type': 'str'}, - 'script_data': {'key': 'scriptData', 'type': 'str'}, - 'script_arguments': {'key': 'scriptArguments', 'type': 'str'}, - 'timeout': {'key': 'timeout', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword script_source: The storage source of the script: inline, workspace. - :paramtype script_source: str - :keyword script_data: The location of scripts in the mounted volume. - :paramtype script_data: str - :keyword script_arguments: Optional command line arguments passed to the script to run. - :paramtype script_arguments: str - :keyword timeout: Optional time period passed to timeout command. - :paramtype timeout: str - """ - super(ScriptReference, self).__init__(**kwargs) - self.script_source = kwargs.get('script_source', None) - self.script_data = kwargs.get('script_data', None) - self.script_arguments = kwargs.get('script_arguments', None) - self.timeout = kwargs.get('timeout', None) - - -class ScriptsToExecute(msrest.serialization.Model): - """Customized setup scripts. - - :ivar startup_script: Script that's run every time the machine starts. - :vartype startup_script: ~azure.mgmt.machinelearningservices.models.ScriptReference - :ivar creation_script: Script that's run only once during provision of the compute. - :vartype creation_script: ~azure.mgmt.machinelearningservices.models.ScriptReference - """ - - _attribute_map = { - 'startup_script': {'key': 'startupScript', 'type': 'ScriptReference'}, - 'creation_script': {'key': 'creationScript', 'type': 'ScriptReference'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword startup_script: Script that's run every time the machine starts. - :paramtype startup_script: ~azure.mgmt.machinelearningservices.models.ScriptReference - :keyword creation_script: Script that's run only once during provision of the compute. - :paramtype creation_script: ~azure.mgmt.machinelearningservices.models.ScriptReference - """ - super(ScriptsToExecute, self).__init__(**kwargs) - self.startup_script = kwargs.get('startup_script', None) - self.creation_script = kwargs.get('creation_script', None) - - -class SecretConfiguration(msrest.serialization.Model): - """Secret Configuration definition. - - :ivar uri: Secret Uri. - Sample Uri : https://myvault.vault.azure.net/secrets/mysecretname/secretversion. - :vartype uri: str - :ivar workspace_secret_name: Name of secret in workspace key vault. - :vartype workspace_secret_name: str - """ - - _attribute_map = { - 'uri': {'key': 'uri', 'type': 'str'}, - 'workspace_secret_name': {'key': 'workspaceSecretName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword uri: Secret Uri. - Sample Uri : https://myvault.vault.azure.net/secrets/mysecretname/secretversion. - :paramtype uri: str - :keyword workspace_secret_name: Name of secret in workspace key vault. - :paramtype workspace_secret_name: str - """ - super(SecretConfiguration, self).__init__(**kwargs) - self.uri = kwargs.get('uri', None) - self.workspace_secret_name = kwargs.get('workspace_secret_name', None) - - -class ServerlessEndpoint(TrackedResource): - """ServerlessEndpoint. - - Variables are only populated by the server, and will be ignored when sending a request. 
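# Illustrative sketch: a creation script that runs once at provisioning time plus
# a startup script that runs on every boot, wrapped in ScriptsToExecute. Paths,
# arguments, and the import path are assumptions for illustration only.
from azure.ai.ml._restclient.v2023_08_01_preview import models

scripts = models.ScriptsToExecute(
    creation_script=models.ScriptReference(
        script_source="workspace",               # "inline" or "workspace" per the docstring
        script_data="Users/admin/provision.sh",
        script_arguments="--install-drivers",
        timeout="20m",
    ),
    startup_script=models.ScriptReference(
        script_source="inline",
        script_data="echo 'compute instance started'",
    ),
)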
- - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: A set of tags. Resource tags. - :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. - :vartype location: str - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :vartype kind: str - :ivar properties: Required. [Required] Additional attributes of the entity. - :vartype properties: ~azure.mgmt.machinelearningservices.models.ServerlessEndpointProperties - :ivar sku: Sku details required for ARM contract for Autoscaling. - :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'ServerlessEndpointProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword tags: A set of tags. Resource tags. - :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. - :paramtype location: str - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for - resources of the same type. - :paramtype kind: str - :keyword properties: Required. [Required] Additional attributes of the entity. - :paramtype properties: ~azure.mgmt.machinelearningservices.models.ServerlessEndpointProperties - :keyword sku: Sku details required for ARM contract for Autoscaling. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - """ - super(ServerlessEndpoint, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.properties = kwargs['properties'] - self.sku = kwargs.get('sku', None) - - -class ServerlessEndpointProperties(msrest.serialization.Model): - """ServerlessEndpointProperties. - - Variables are only populated by the server, and will be ignored when sending a request. 
- - All required parameters must be populated in order to send to Azure. - - :ivar inference_uri: The inference uri to target when making requests against the serverless - endpoint. - :vartype inference_uri: str - :ivar model_profile: Required. [Required] The model profile to configure the serverless - endpoint with. - :vartype model_profile: ~azure.mgmt.machinelearningservices.models.ModelProfile - :ivar provisioning_state: Provisioning state for the endpoint. Possible values include: - "Creating", "Deleting", "Succeeded", "Failed", "Updating", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState - """ - - _validation = { - 'inference_uri': {'readonly': True}, - 'model_profile': {'required': True}, - 'provisioning_state': {'readonly': True}, - } - - _attribute_map = { - 'inference_uri': {'key': 'inferenceUri', 'type': 'str'}, - 'model_profile': {'key': 'modelProfile', 'type': 'ModelProfile'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword model_profile: Required. [Required] The model profile to configure the serverless - endpoint with. - :paramtype model_profile: ~azure.mgmt.machinelearningservices.models.ModelProfile - """ - super(ServerlessEndpointProperties, self).__init__(**kwargs) - self.inference_uri = None - self.model_profile = kwargs['model_profile'] - self.provisioning_state = None - - -class ServerlessEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of ServerlessEndpoint entities. - - :ivar next_link: The link to the next page of ServerlessEndpoint objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type ServerlessEndpoint. - :vartype value: list[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[ServerlessEndpoint]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of ServerlessEndpoint objects. If null, there are - no additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type ServerlessEndpoint. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] - """ - super(ServerlessEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class ServiceManagedResourcesSettings(msrest.serialization.Model): - """ServiceManagedResourcesSettings. - - :ivar cosmos_db: - :vartype cosmos_db: ~azure.mgmt.machinelearningservices.models.CosmosDbSettings - """ - - _attribute_map = { - 'cosmos_db': {'key': 'cosmosDb', 'type': 'CosmosDbSettings'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword cosmos_db: - :paramtype cosmos_db: ~azure.mgmt.machinelearningservices.models.CosmosDbSettings - """ - super(ServiceManagedResourcesSettings, self).__init__(**kwargs) - self.cosmos_db = kwargs.get('cosmos_db', None) - - -class ServicePrincipalAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): - """ServicePrincipalAuthTypeWorkspaceConnectionProperties. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. 
Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar expiry_time: - :vartype expiry_time: ~datetime.datetime - :ivar metadata: Any object. - :vartype metadata: any - :ivar target: - :vartype target: str - :ivar credentials: - :vartype credentials: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionServicePrincipal - """ - - _validation = { - 'auth_type': {'required': True}, - } - - _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionServicePrincipal'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :keyword expiry_time: - :paramtype expiry_time: ~datetime.datetime - :keyword metadata: Any object. - :paramtype metadata: any - :keyword target: - :paramtype target: str - :keyword credentials: - :paramtype credentials: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionServicePrincipal - """ - super(ServicePrincipalAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'ServicePrincipal' # type: str - self.credentials = kwargs.get('credentials', None) - - -class ServicePrincipalDatastoreCredentials(DatastoreCredentials): - """Service Principal datastore credentials configuration. - - All required parameters must be populated in order to send to Azure. - - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". - :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType - :ivar authority_url: Authority URL used for authentication. - :vartype authority_url: str - :ivar client_id: Required. [Required] Service principal client ID. - :vartype client_id: str - :ivar resource_url: Resource the service principal has access to. - :vartype resource_url: str - :ivar secrets: Required. [Required] Service principal secrets. - :vartype secrets: ~azure.mgmt.machinelearningservices.models.ServicePrincipalDatastoreSecrets - :ivar tenant_id: Required. [Required] ID of the tenant to which the service principal belongs. 
- :vartype tenant_id: str - """ - - _validation = { - 'credentials_type': {'required': True}, - 'client_id': {'required': True}, - 'secrets': {'required': True}, - 'tenant_id': {'required': True}, - } - - _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'authority_url': {'key': 'authorityUrl', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'resource_url': {'key': 'resourceUrl', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'ServicePrincipalDatastoreSecrets'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword authority_url: Authority URL used for authentication. - :paramtype authority_url: str - :keyword client_id: Required. [Required] Service principal client ID. - :paramtype client_id: str - :keyword resource_url: Resource the service principal has access to. - :paramtype resource_url: str - :keyword secrets: Required. [Required] Service principal secrets. - :paramtype secrets: ~azure.mgmt.machinelearningservices.models.ServicePrincipalDatastoreSecrets - :keyword tenant_id: Required. [Required] ID of the tenant to which the service principal - belongs. - :paramtype tenant_id: str - """ - super(ServicePrincipalDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'ServicePrincipal' # type: str - self.authority_url = kwargs.get('authority_url', None) - self.client_id = kwargs['client_id'] - self.resource_url = kwargs.get('resource_url', None) - self.secrets = kwargs['secrets'] - self.tenant_id = kwargs['tenant_id'] - - -class ServicePrincipalDatastoreSecrets(DatastoreSecrets): - """Datastore Service Principal secrets. - - All required parameters must be populated in order to send to Azure. - - :ivar secrets_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", "Sas", - "ServicePrincipal", "KerberosPassword", "KerberosKeytab". - :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType - :ivar client_secret: Service principal secret. - :vartype client_secret: str - """ - - _validation = { - 'secrets_type': {'required': True}, - } - - _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'client_secret': {'key': 'clientSecret', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword client_secret: Service principal secret. - :paramtype client_secret: str - """ - super(ServicePrincipalDatastoreSecrets, self).__init__(**kwargs) - self.secrets_type = 'ServicePrincipal' # type: str - self.client_secret = kwargs.get('client_secret', None) - - -class ServiceTagDestination(msrest.serialization.Model): - """Service Tag destination for a Service Tag Outbound Rule for the managed network of a machine learning workspace. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar action: The action enum for networking rule. Possible values include: "Allow", "Deny". - :vartype action: str or ~azure.mgmt.machinelearningservices.models.RuleAction - :ivar address_prefixes: Optional, if provided, the ServiceTag property will be ignored. 
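# Illustrative sketch: the subclass constructors pin credentials_type/secrets_type
# to 'ServicePrincipal', so only the payload fields are passed in. The GUIDs and
# secret below are fake placeholders; import path assumed from this package's layout.
from azure.ai.ml._restclient.v2023_08_01_preview import models

sp_credentials = models.ServicePrincipalDatastoreCredentials(
    client_id="00000000-0000-0000-0000-000000000000",
    tenant_id="00000000-0000-0000-0000-000000000000",
    authority_url="https://login.microsoftonline.com",   # optional
    secrets=models.ServicePrincipalDatastoreSecrets(client_secret="<client-secret>"),
)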
- :vartype address_prefixes: list[str] - :ivar port_ranges: - :vartype port_ranges: str - :ivar protocol: - :vartype protocol: str - :ivar service_tag: - :vartype service_tag: str - """ - - _validation = { - 'address_prefixes': {'readonly': True}, - } - - _attribute_map = { - 'action': {'key': 'action', 'type': 'str'}, - 'address_prefixes': {'key': 'addressPrefixes', 'type': '[str]'}, - 'port_ranges': {'key': 'portRanges', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'service_tag': {'key': 'serviceTag', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword action: The action enum for networking rule. Possible values include: "Allow", "Deny". - :paramtype action: str or ~azure.mgmt.machinelearningservices.models.RuleAction - :keyword port_ranges: - :paramtype port_ranges: str - :keyword protocol: - :paramtype protocol: str - :keyword service_tag: - :paramtype service_tag: str - """ - super(ServiceTagDestination, self).__init__(**kwargs) - self.action = kwargs.get('action', None) - self.address_prefixes = None - self.port_ranges = kwargs.get('port_ranges', None) - self.protocol = kwargs.get('protocol', None) - self.service_tag = kwargs.get('service_tag', None) - - -class ServiceTagOutboundRule(OutboundRule): - """Service Tag Outbound Rule for the managed network of a machine learning workspace. - - All required parameters must be populated in order to send to Azure. - - :ivar category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". - :vartype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :ivar status: Type of a managed network Outbound Rule of a machine learning workspace. Possible - values include: "Inactive", "Active". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus - :ivar type: Required. Type of a managed network Outbound Rule of a machine learning - workspace.Constant filled by server. Possible values include: "FQDN", "PrivateEndpoint", - "ServiceTag". - :vartype type: str or ~azure.mgmt.machinelearningservices.models.RuleType - :ivar destination: Service Tag destination for a Service Tag Outbound Rule for the managed - network of a machine learning workspace. - :vartype destination: ~azure.mgmt.machinelearningservices.models.ServiceTagDestination - """ - - _validation = { - 'type': {'required': True}, - } - - _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'destination': {'key': 'destination', 'type': 'ServiceTagDestination'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :keyword status: Type of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Inactive", "Active". - :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus - :keyword destination: Service Tag destination for a Service Tag Outbound Rule for the managed - network of a machine learning workspace. 
- :paramtype destination: ~azure.mgmt.machinelearningservices.models.ServiceTagDestination - """ - super(ServiceTagOutboundRule, self).__init__(**kwargs) - self.type = 'ServiceTag' # type: str - self.destination = kwargs.get('destination', None) - - -class SetupScripts(msrest.serialization.Model): - """Details of customized scripts to execute for setting up the cluster. - - :ivar scripts: Customized setup scripts. - :vartype scripts: ~azure.mgmt.machinelearningservices.models.ScriptsToExecute - """ - - _attribute_map = { - 'scripts': {'key': 'scripts', 'type': 'ScriptsToExecute'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword scripts: Customized setup scripts. - :paramtype scripts: ~azure.mgmt.machinelearningservices.models.ScriptsToExecute - """ - super(SetupScripts, self).__init__(**kwargs) - self.scripts = kwargs.get('scripts', None) - - -class SharedPrivateLinkResource(msrest.serialization.Model): - """SharedPrivateLinkResource. - - :ivar name: Unique name of the private link. - :vartype name: str - :ivar group_id: group id of the private link. - :vartype group_id: str - :ivar private_link_resource_id: the resource id that private link links to. - :vartype private_link_resource_id: str - :ivar request_message: Request message. - :vartype request_message: str - :ivar status: Connection status of the service consumer with the service provider. Possible - values include: "Approved", "Pending", "Rejected", "Disconnected", "Timeout". - :vartype status: str or - ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus - """ - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, - 'request_message': {'key': 'properties.requestMessage', 'type': 'str'}, - 'status': {'key': 'properties.status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword name: Unique name of the private link. - :paramtype name: str - :keyword group_id: group id of the private link. - :paramtype group_id: str - :keyword private_link_resource_id: the resource id that private link links to. - :paramtype private_link_resource_id: str - :keyword request_message: Request message. - :paramtype request_message: str - :keyword status: Connection status of the service consumer with the service provider. Possible - values include: "Approved", "Pending", "Rejected", "Disconnected", "Timeout". - :paramtype status: str or - ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus - """ - super(SharedPrivateLinkResource, self).__init__(**kwargs) - self.name = kwargs.get('name', None) - self.group_id = kwargs.get('group_id', None) - self.private_link_resource_id = kwargs.get('private_link_resource_id', None) - self.request_message = kwargs.get('request_message', None) - self.status = kwargs.get('status', None) - - -class Sku(msrest.serialization.Model): - """The resource model definition representing SKU. - - All required parameters must be populated in order to send to Azure. - - :ivar name: Required. The name of the SKU. Ex - P3. It is typically a letter+number code. - :vartype name: str - :ivar tier: This field is required to be implemented by the Resource Provider if the service - has more than one tier, but is not required on a PUT. Possible values include: "Free", "Basic", - "Standard", "Premium". 
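# Illustrative sketch: an outbound rule targeting the Storage service tag; the
# base-class discriminator 'type' is filled to 'ServiceTag' by the constructor,
# so only category/status/destination are supplied. Values are hypothetical;
# import path assumed from this package's layout.
from azure.ai.ml._restclient.v2023_08_01_preview import models

storage_rule = models.ServiceTagOutboundRule(
    category="UserDefined",
    destination=models.ServiceTagDestination(
        service_tag="Storage",
        protocol="TCP",
        port_ranges="443",
    ),
)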
- :vartype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier - :ivar size: The SKU size. When the name field is the combination of tier and some other value, - this would be the standalone code. - :vartype size: str - :ivar family: If the service has different generations of hardware, for the same SKU, then that - can be captured here. - :vartype family: str - :ivar capacity: If the SKU supports scale out/in then the capacity integer should be included. - If scale out/in is not possible for the resource this may be omitted. - :vartype capacity: int - """ - - _validation = { - 'name': {'required': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - 'family': {'key': 'family', 'type': 'str'}, - 'capacity': {'key': 'capacity', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword name: Required. The name of the SKU. Ex - P3. It is typically a letter+number code. - :paramtype name: str - :keyword tier: This field is required to be implemented by the Resource Provider if the service - has more than one tier, but is not required on a PUT. Possible values include: "Free", "Basic", - "Standard", "Premium". - :paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier - :keyword size: The SKU size. When the name field is the combination of tier and some other - value, this would be the standalone code. - :paramtype size: str - :keyword family: If the service has different generations of hardware, for the same SKU, then - that can be captured here. - :paramtype family: str - :keyword capacity: If the SKU supports scale out/in then the capacity integer should be - included. If scale out/in is not possible for the resource this may be omitted. - :paramtype capacity: int - """ - super(Sku, self).__init__(**kwargs) - self.name = kwargs['name'] - self.tier = kwargs.get('tier', None) - self.size = kwargs.get('size', None) - self.family = kwargs.get('family', None) - self.capacity = kwargs.get('capacity', None) - - -class SkuCapacity(msrest.serialization.Model): - """SKU capacity information. - - :ivar default: Gets or sets the default capacity. - :vartype default: int - :ivar maximum: Gets or sets the maximum. - :vartype maximum: int - :ivar minimum: Gets or sets the minimum. - :vartype minimum: int - :ivar scale_type: Gets or sets the type of the scale. Possible values include: "Automatic", - "Manual", "None". - :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.SkuScaleType - """ - - _attribute_map = { - 'default': {'key': 'default', 'type': 'int'}, - 'maximum': {'key': 'maximum', 'type': 'int'}, - 'minimum': {'key': 'minimum', 'type': 'int'}, - 'scale_type': {'key': 'scaleType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword default: Gets or sets the default capacity. - :paramtype default: int - :keyword maximum: Gets or sets the maximum. - :paramtype maximum: int - :keyword minimum: Gets or sets the minimum. - :paramtype minimum: int - :keyword scale_type: Gets or sets the type of the scale. Possible values include: "Automatic", - "Manual", "None". 
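# Illustrative sketch of the ARM SKU envelope; 'name' is the only required field.
# SKU name/tier values are hypothetical; import path assumed from this package's layout.
from azure.ai.ml._restclient.v2023_08_01_preview import models

sku = models.Sku(name="Standard_DS3_v2", tier="Standard", capacity=1)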
- :paramtype scale_type: str or ~azure.mgmt.machinelearningservices.models.SkuScaleType - """ - super(SkuCapacity, self).__init__(**kwargs) - self.default = kwargs.get('default', 0) - self.maximum = kwargs.get('maximum', 0) - self.minimum = kwargs.get('minimum', 0) - self.scale_type = kwargs.get('scale_type', None) - - -class SkuResource(msrest.serialization.Model): - """Fulfills ARM Contract requirement to list all available SKUS for a resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar capacity: Gets or sets the Sku Capacity. - :vartype capacity: ~azure.mgmt.machinelearningservices.models.SkuCapacity - :ivar resource_type: The resource type name. - :vartype resource_type: str - :ivar sku: Gets or sets the Sku. - :vartype sku: ~azure.mgmt.machinelearningservices.models.SkuSetting - """ - - _validation = { - 'resource_type': {'readonly': True}, - } - - _attribute_map = { - 'capacity': {'key': 'capacity', 'type': 'SkuCapacity'}, - 'resource_type': {'key': 'resourceType', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'SkuSetting'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword capacity: Gets or sets the Sku Capacity. - :paramtype capacity: ~azure.mgmt.machinelearningservices.models.SkuCapacity - :keyword sku: Gets or sets the Sku. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.SkuSetting - """ - super(SkuResource, self).__init__(**kwargs) - self.capacity = kwargs.get('capacity', None) - self.resource_type = None - self.sku = kwargs.get('sku', None) - - -class SkuResourceArmPaginatedResult(msrest.serialization.Model): - """A paginated list of SkuResource entities. - - :ivar next_link: The link to the next page of SkuResource objects. If null, there are no - additional pages. - :vartype next_link: str - :ivar value: An array of objects of type SkuResource. - :vartype value: list[~azure.mgmt.machinelearningservices.models.SkuResource] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[SkuResource]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page of SkuResource objects. If null, there are no - additional pages. - :paramtype next_link: str - :keyword value: An array of objects of type SkuResource. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.SkuResource] - """ - super(SkuResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class SkuSetting(msrest.serialization.Model): - """SkuSetting fulfills the need for stripped down SKU info in ARM contract. - - All required parameters must be populated in order to send to Azure. - - :ivar name: Required. [Required] The name of the SKU. Ex - P3. It is typically a letter+number - code. - :vartype name: str - :ivar tier: This field is required to be implemented by the Resource Provider if the service - has more than one tier, but is not required on a PUT. Possible values include: "Free", "Basic", - "Standard", "Premium". - :vartype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier - """ - - _validation = { - 'name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword name: Required. [Required] The name of the SKU. Ex - P3. 
It is typically a - letter+number code. - :paramtype name: str - :keyword tier: This field is required to be implemented by the Resource Provider if the service - has more than one tier, but is not required on a PUT. Possible values include: "Free", "Basic", - "Standard", "Premium". - :paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier - """ - super(SkuSetting, self).__init__(**kwargs) - self.name = kwargs['name'] - self.tier = kwargs.get('tier', None) - - -class SparkJob(JobBaseProperties): - """Spark job definition. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar component_id: ARM resource ID of the component resource. - :vartype component_id: str - :ivar compute_id: ARM resource ID of the compute resource. - :vartype compute_id: str - :ivar display_name: Display name of job. - :vartype display_name: str - :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. - :vartype experiment_name: str - :ivar identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. - :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". - :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType - :ivar notification_setting: Notification setting for the job. - :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :ivar secrets_configuration: Configuration for secrets to be made available during runtime. - :vartype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :ivar services: List of JobEndpoints. - For local jobs, a job endpoint will have an endpoint value of FileStreamObject. - :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus - :ivar archives: Archive files used in the job. - :vartype archives: list[str] - :ivar args: Arguments for the job. - :vartype args: str - :ivar code_id: Required. [Required] ARM resource ID of the code asset. - :vartype code_id: str - :ivar conf: Spark configured properties. - :vartype conf: dict[str, str] - :ivar entry: Required. [Required] The entry to execute on startup of the job. - :vartype entry: ~azure.mgmt.machinelearningservices.models.SparkJobEntry - :ivar environment_id: The ARM resource ID of the Environment specification for the job. - :vartype environment_id: str - :ivar files: Files used in the job. 
- :vartype files: list[str] - :ivar inputs: Mapping of input data bindings used in the job. - :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] - :ivar jars: Jar files used in the job. - :vartype jars: list[str] - :ivar outputs: Mapping of output data bindings used in the job. - :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] - :ivar py_files: Python files used in the job. - :vartype py_files: list[str] - :ivar queue_settings: Queue settings for the job. - :vartype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings - :ivar resources: Compute Resource configuration for the job. - :vartype resources: ~azure.mgmt.machinelearningservices.models.SparkResourceConfiguration - """ - - _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'code_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'entry': {'required': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'archives': {'key': 'archives', 'type': '[str]'}, - 'args': {'key': 'args', 'type': 'str'}, - 'code_id': {'key': 'codeId', 'type': 'str'}, - 'conf': {'key': 'conf', 'type': '{str}'}, - 'entry': {'key': 'entry', 'type': 'SparkJobEntry'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'files': {'key': 'files', 'type': '[str]'}, - 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'jars': {'key': 'jars', 'type': '[str]'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'py_files': {'key': 'pyFiles', 'type': '[str]'}, - 'queue_settings': {'key': 'queueSettings', 'type': 'QueueSettings'}, - 'resources': {'key': 'resources', 'type': 'SparkResourceConfiguration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword component_id: ARM resource ID of the component resource. - :paramtype component_id: str - :keyword compute_id: ARM resource ID of the compute resource. - :paramtype compute_id: str - :keyword display_name: Display name of job. - :paramtype display_name: str - :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. - :paramtype experiment_name: str - :keyword identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. 
- :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - :keyword notification_setting: Notification setting for the job. - :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :keyword secrets_configuration: Configuration for secrets to be made available during runtime. - :paramtype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :keyword services: List of JobEndpoints. - For local jobs, a job endpoint will have an endpoint value of FileStreamObject. - :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :keyword archives: Archive files used in the job. - :paramtype archives: list[str] - :keyword args: Arguments for the job. - :paramtype args: str - :keyword code_id: Required. [Required] ARM resource ID of the code asset. - :paramtype code_id: str - :keyword conf: Spark configured properties. - :paramtype conf: dict[str, str] - :keyword entry: Required. [Required] The entry to execute on startup of the job. - :paramtype entry: ~azure.mgmt.machinelearningservices.models.SparkJobEntry - :keyword environment_id: The ARM resource ID of the Environment specification for the job. - :paramtype environment_id: str - :keyword files: Files used in the job. - :paramtype files: list[str] - :keyword inputs: Mapping of input data bindings used in the job. - :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] - :keyword jars: Jar files used in the job. - :paramtype jars: list[str] - :keyword outputs: Mapping of output data bindings used in the job. - :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] - :keyword py_files: Python files used in the job. - :paramtype py_files: list[str] - :keyword queue_settings: Queue settings for the job. - :paramtype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings - :keyword resources: Compute Resource configuration for the job. - :paramtype resources: ~azure.mgmt.machinelearningservices.models.SparkResourceConfiguration - """ - super(SparkJob, self).__init__(**kwargs) - self.job_type = 'Spark' # type: str - self.archives = kwargs.get('archives', None) - self.args = kwargs.get('args', None) - self.code_id = kwargs['code_id'] - self.conf = kwargs.get('conf', None) - self.entry = kwargs['entry'] - self.environment_id = kwargs.get('environment_id', None) - self.files = kwargs.get('files', None) - self.inputs = kwargs.get('inputs', None) - self.jars = kwargs.get('jars', None) - self.outputs = kwargs.get('outputs', None) - self.py_files = kwargs.get('py_files', None) - self.queue_settings = kwargs.get('queue_settings', None) - self.resources = kwargs.get('resources', None) - - -class SparkJobEntry(msrest.serialization.Model): - """Spark job entry point definition. - - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SparkJobPythonEntry, SparkJobScalaEntry. - - All required parameters must be populated in order to send to Azure. - - :ivar spark_job_entry_type: Required. [Required] Type of the job's entry point.Constant filled - by server. Possible values include: "SparkJobPythonEntry", "SparkJobScalaEntry". 
- :vartype spark_job_entry_type: str or - ~azure.mgmt.machinelearningservices.models.SparkJobEntryType - """ - - _validation = { - 'spark_job_entry_type': {'required': True}, - } - - _attribute_map = { - 'spark_job_entry_type': {'key': 'sparkJobEntryType', 'type': 'str'}, - } - - _subtype_map = { - 'spark_job_entry_type': {'SparkJobPythonEntry': 'SparkJobPythonEntry', 'SparkJobScalaEntry': 'SparkJobScalaEntry'} - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(SparkJobEntry, self).__init__(**kwargs) - self.spark_job_entry_type = None # type: Optional[str] - - -class SparkJobPythonEntry(SparkJobEntry): - """SparkJobPythonEntry. - - All required parameters must be populated in order to send to Azure. - - :ivar spark_job_entry_type: Required. [Required] Type of the job's entry point.Constant filled - by server. Possible values include: "SparkJobPythonEntry", "SparkJobScalaEntry". - :vartype spark_job_entry_type: str or - ~azure.mgmt.machinelearningservices.models.SparkJobEntryType - :ivar file: Required. [Required] Relative python file path for job entry point. - :vartype file: str - """ - - _validation = { - 'spark_job_entry_type': {'required': True}, - 'file': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'spark_job_entry_type': {'key': 'sparkJobEntryType', 'type': 'str'}, - 'file': {'key': 'file', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword file: Required. [Required] Relative python file path for job entry point. - :paramtype file: str - """ - super(SparkJobPythonEntry, self).__init__(**kwargs) - self.spark_job_entry_type = 'SparkJobPythonEntry' # type: str - self.file = kwargs['file'] - - -class SparkJobScalaEntry(SparkJobEntry): - """SparkJobScalaEntry. - - All required parameters must be populated in order to send to Azure. - - :ivar spark_job_entry_type: Required. [Required] Type of the job's entry point.Constant filled - by server. Possible values include: "SparkJobPythonEntry", "SparkJobScalaEntry". - :vartype spark_job_entry_type: str or - ~azure.mgmt.machinelearningservices.models.SparkJobEntryType - :ivar class_name: Required. [Required] Scala class name used as entry point. - :vartype class_name: str - """ - - _validation = { - 'spark_job_entry_type': {'required': True}, - 'class_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'spark_job_entry_type': {'key': 'sparkJobEntryType', 'type': 'str'}, - 'class_name': {'key': 'className', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword class_name: Required. [Required] Scala class name used as entry point. - :paramtype class_name: str - """ - super(SparkJobScalaEntry, self).__init__(**kwargs) - self.spark_job_entry_type = 'SparkJobScalaEntry' # type: str - self.class_name = kwargs['class_name'] - - -class SparkResourceConfiguration(msrest.serialization.Model): - """SparkResourceConfiguration. - - :ivar instance_type: Optional type of VM used as supported by the compute target. - :vartype instance_type: str - :ivar runtime_version: Version of spark runtime used for the job. - :vartype runtime_version: str - """ - - _attribute_map = { - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'runtime_version': {'key': 'runtimeVersion', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword instance_type: Optional type of VM used as supported by the compute target. 
- :paramtype instance_type: str - :keyword runtime_version: Version of spark runtime used for the job. - :paramtype runtime_version: str - """ - super(SparkResourceConfiguration, self).__init__(**kwargs) - self.instance_type = kwargs.get('instance_type', None) - self.runtime_version = kwargs.get('runtime_version', "3.1") - - -class SslConfiguration(msrest.serialization.Model): - """The ssl configuration for scoring. - - :ivar status: Enable or disable ssl for scoring. Possible values include: "Disabled", - "Enabled", "Auto". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.SslConfigStatus - :ivar cert: Cert data. - :vartype cert: str - :ivar key: Key data. - :vartype key: str - :ivar cname: CNAME of the cert. - :vartype cname: str - :ivar leaf_domain_label: Leaf domain label of public endpoint. - :vartype leaf_domain_label: str - :ivar overwrite_existing_domain: Indicates whether to overwrite existing domain label. - :vartype overwrite_existing_domain: bool - """ - - _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'cert': {'key': 'cert', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'str'}, - 'cname': {'key': 'cname', 'type': 'str'}, - 'leaf_domain_label': {'key': 'leafDomainLabel', 'type': 'str'}, - 'overwrite_existing_domain': {'key': 'overwriteExistingDomain', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword status: Enable or disable ssl for scoring. Possible values include: "Disabled", - "Enabled", "Auto". - :paramtype status: str or ~azure.mgmt.machinelearningservices.models.SslConfigStatus - :keyword cert: Cert data. - :paramtype cert: str - :keyword key: Key data. - :paramtype key: str - :keyword cname: CNAME of the cert. - :paramtype cname: str - :keyword leaf_domain_label: Leaf domain label of public endpoint. - :paramtype leaf_domain_label: str - :keyword overwrite_existing_domain: Indicates whether to overwrite existing domain label. - :paramtype overwrite_existing_domain: bool - """ - super(SslConfiguration, self).__init__(**kwargs) - self.status = kwargs.get('status', None) - self.cert = kwargs.get('cert', None) - self.key = kwargs.get('key', None) - self.cname = kwargs.get('cname', None) - self.leaf_domain_label = kwargs.get('leaf_domain_label', None) - self.overwrite_existing_domain = kwargs.get('overwrite_existing_domain', None) - - -class StackEnsembleSettings(msrest.serialization.Model): - """Advances setting to customize StackEnsemble run. - - :ivar stack_meta_learner_k_wargs: Optional parameters to pass to the initializer of the - meta-learner. - :vartype stack_meta_learner_k_wargs: any - :ivar stack_meta_learner_train_percentage: Specifies the proportion of the training set (when - choosing train and validation type of training) to be reserved for training the meta-learner. - Default value is 0.2. - :vartype stack_meta_learner_train_percentage: float - :ivar stack_meta_learner_type: The meta-learner is a model trained on the output of the - individual heterogeneous models. Possible values include: "None", "LogisticRegression", - "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV", - "LightGBMRegressor", "LinearRegression". 
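# Usage sketch for the removed models above (illustrative only): the kwargs-based classes
# were constructed with plain keyword arguments. The import path below is an assumption
# drawn from the ~azure.mgmt.machinelearningservices.models references in the docstrings,
# and the literal values are placeholders; only the keyword names, documented enum values
# and the "3.1" default come from the definitions above.
from azure.mgmt.machinelearningservices import models as _models  # assumed namespace

spark_resources = _models.SparkResourceConfiguration(
    instance_type="<vm-size>",   # optional; any VM type supported by the compute target
    runtime_version="3.1",       # same value the removed constructor used as its default
)
ssl_config = _models.SslConfiguration(
    status="Enabled",            # documented values: "Disabled", "Enabled", "Auto"
    cert="<pem-cert-data>",
    key="<pem-key-data>",
    cname="<cname-of-cert>",
    leaf_domain_label="<leaf-domain-label>",
    overwrite_existing_domain=True,
)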
- :vartype stack_meta_learner_type: str or - ~azure.mgmt.machinelearningservices.models.StackMetaLearnerType - """ - - _attribute_map = { - 'stack_meta_learner_k_wargs': {'key': 'stackMetaLearnerKWargs', 'type': 'object'}, - 'stack_meta_learner_train_percentage': {'key': 'stackMetaLearnerTrainPercentage', 'type': 'float'}, - 'stack_meta_learner_type': {'key': 'stackMetaLearnerType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword stack_meta_learner_k_wargs: Optional parameters to pass to the initializer of the - meta-learner. - :paramtype stack_meta_learner_k_wargs: any - :keyword stack_meta_learner_train_percentage: Specifies the proportion of the training set - (when choosing train and validation type of training) to be reserved for training the - meta-learner. Default value is 0.2. - :paramtype stack_meta_learner_train_percentage: float - :keyword stack_meta_learner_type: The meta-learner is a model trained on the output of the - individual heterogeneous models. Possible values include: "None", "LogisticRegression", - "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV", - "LightGBMRegressor", "LinearRegression". - :paramtype stack_meta_learner_type: str or - ~azure.mgmt.machinelearningservices.models.StackMetaLearnerType - """ - super(StackEnsembleSettings, self).__init__(**kwargs) - self.stack_meta_learner_k_wargs = kwargs.get('stack_meta_learner_k_wargs', None) - self.stack_meta_learner_train_percentage = kwargs.get('stack_meta_learner_train_percentage', 0.2) - self.stack_meta_learner_type = kwargs.get('stack_meta_learner_type', None) - - -class StaticInputData(MonitoringInputDataBase): - """Static input data definition. - - All required parameters must be populated in order to send to Azure. - - :ivar columns: Mapping of column names to special uses. - :vartype columns: dict[str, str] - :ivar data_context: The context metadata of the data source. - :vartype data_context: str - :ivar input_data_type: Required. [Required] Specifies the type of signal to monitor.Constant - filled by server. Possible values include: "Static", "Trailing", "Fixed". - :vartype input_data_type: str or - ~azure.mgmt.machinelearningservices.models.MonitoringInputDataType - :ivar job_input_type: Required. [Required] Specifies the type of job. Possible values include: - "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", "triton_model". - :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str - :ivar preprocessing_component_id: The ARM resource ID of the component resource used to - preprocess the data. - :vartype preprocessing_component_id: str - :ivar window_end: Required. [Required] The end date of the data window. - :vartype window_end: ~datetime.datetime - :ivar window_start: Required. [Required] The start date of the data window. 
- :vartype window_start: ~datetime.datetime - """ - - _validation = { - 'input_data_type': {'required': True}, - 'job_input_type': {'required': True}, - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'window_end': {'required': True}, - 'window_start': {'required': True}, - } - - _attribute_map = { - 'columns': {'key': 'columns', 'type': '{str}'}, - 'data_context': {'key': 'dataContext', 'type': 'str'}, - 'input_data_type': {'key': 'inputDataType', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'preprocessing_component_id': {'key': 'preprocessingComponentId', 'type': 'str'}, - 'window_end': {'key': 'windowEnd', 'type': 'iso-8601'}, - 'window_start': {'key': 'windowStart', 'type': 'iso-8601'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword columns: Mapping of column names to special uses. - :paramtype columns: dict[str, str] - :keyword data_context: The context metadata of the data source. - :paramtype data_context: str - :keyword job_input_type: Required. [Required] Specifies the type of job. Possible values - include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", - "triton_model". - :paramtype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str - :keyword preprocessing_component_id: The ARM resource ID of the component resource used to - preprocess the data. - :paramtype preprocessing_component_id: str - :keyword window_end: Required. [Required] The end date of the data window. - :paramtype window_end: ~datetime.datetime - :keyword window_start: Required. [Required] The start date of the data window. - :paramtype window_start: ~datetime.datetime - """ - super(StaticInputData, self).__init__(**kwargs) - self.input_data_type = 'Static' # type: str - self.preprocessing_component_id = kwargs.get('preprocessing_component_id', None) - self.window_end = kwargs['window_end'] - self.window_start = kwargs['window_start'] - - -class StatusMessage(msrest.serialization.Model): - """Active message associated with project. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar code: Service-defined message code. - :vartype code: str - :ivar created_date_time: Time in UTC at which the message was created. - :vartype created_date_time: ~datetime.datetime - :ivar level: Severity level of message. Possible values include: "Error", "Information", - "Warning". - :vartype level: str or ~azure.mgmt.machinelearningservices.models.StatusMessageLevel - :ivar message: A human-readable representation of the message code. - :vartype message: str - """ - - _validation = { - 'code': {'readonly': True}, - 'created_date_time': {'readonly': True}, - 'level': {'readonly': True}, - 'message': {'readonly': True}, - } - - _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'}, - 'level': {'key': 'level', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(StatusMessage, self).__init__(**kwargs) - self.code = None - self.created_date_time = None - self.level = None - self.message = None - - -class StorageAccountDetails(msrest.serialization.Model): - """Details of storage account to be used for the Registry. 
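# Usage sketch (illustrative only; same assumed _models namespace and placeholder values as
# in the sketch above): StaticInputData as the removed constructor accepted it. The required
# kwargs are job_input_type, uri, window_start and window_end; input_data_type is pinned to
# "Static" by the class itself.
import datetime

from azure.mgmt.machinelearningservices import models as _models  # assumed namespace

static_input = _models.StaticInputData(
    job_input_type="mltable",   # documented values include "uri_file", "uri_folder", "mltable", ...
    uri="<input-asset-uri>",    # placeholder; required and non-empty
    window_start=datetime.datetime(2023, 8, 1),
    window_end=datetime.datetime(2023, 8, 31),
)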
- - :ivar system_created_storage_account: Details of system created storage account to be used for - the registry. - :vartype system_created_storage_account: - ~azure.mgmt.machinelearningservices.models.SystemCreatedStorageAccount - :ivar user_created_storage_account: Details of user created storage account to be used for the - registry. - :vartype user_created_storage_account: - ~azure.mgmt.machinelearningservices.models.UserCreatedStorageAccount - """ - - _attribute_map = { - 'system_created_storage_account': {'key': 'systemCreatedStorageAccount', 'type': 'SystemCreatedStorageAccount'}, - 'user_created_storage_account': {'key': 'userCreatedStorageAccount', 'type': 'UserCreatedStorageAccount'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword system_created_storage_account: Details of system created storage account to be used - for the registry. - :paramtype system_created_storage_account: - ~azure.mgmt.machinelearningservices.models.SystemCreatedStorageAccount - :keyword user_created_storage_account: Details of user created storage account to be used for - the registry. - :paramtype user_created_storage_account: - ~azure.mgmt.machinelearningservices.models.UserCreatedStorageAccount - """ - super(StorageAccountDetails, self).__init__(**kwargs) - self.system_created_storage_account = kwargs.get('system_created_storage_account', None) - self.user_created_storage_account = kwargs.get('user_created_storage_account', None) - - -class SweepJob(JobBaseProperties): - """Sweep job definition. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar component_id: ARM resource ID of the component resource. - :vartype component_id: str - :ivar compute_id: ARM resource ID of the compute resource. - :vartype compute_id: str - :ivar display_name: Display name of job. - :vartype display_name: str - :ivar experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. - :vartype experiment_name: str - :ivar identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. - :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :ivar is_archived: Is the asset archived?. - :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". - :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType - :ivar notification_setting: Notification setting for the job. - :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :ivar secrets_configuration: Configuration for secrets to be made available during runtime. - :vartype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :ivar services: List of JobEndpoints. - For local jobs, a job endpoint will have an endpoint value of FileStreamObject. 
- :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus - :ivar component_configuration: Component Configuration for sweep over component. - :vartype component_configuration: - ~azure.mgmt.machinelearningservices.models.ComponentConfiguration - :ivar early_termination: Early termination policies enable canceling poor-performing runs - before they complete. - :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :ivar inputs: Mapping of input data bindings used in the job. - :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] - :ivar limits: Sweep Job limit. - :vartype limits: ~azure.mgmt.machinelearningservices.models.SweepJobLimits - :ivar objective: Required. [Required] Optimization objective. - :vartype objective: ~azure.mgmt.machinelearningservices.models.Objective - :ivar outputs: Mapping of output data bindings used in the job. - :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] - :ivar queue_settings: Queue settings for the job. - :vartype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings - :ivar resources: Compute Resource configuration for the job. - :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration - :ivar sampling_algorithm: Required. [Required] The hyperparameter sampling algorithm. - :vartype sampling_algorithm: ~azure.mgmt.machinelearningservices.models.SamplingAlgorithm - :ivar search_space: Required. [Required] A dictionary containing each parameter and its - distribution. The dictionary key is the name of the parameter. - :vartype search_space: any - :ivar trial: Required. [Required] Trial component definition. 
- :vartype trial: ~azure.mgmt.machinelearningservices.models.TrialComponent - """ - - _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'objective': {'required': True}, - 'sampling_algorithm': {'required': True}, - 'search_space': {'required': True}, - 'trial': {'required': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'component_configuration': {'key': 'componentConfiguration', 'type': 'ComponentConfiguration'}, - 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'}, - 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'limits': {'key': 'limits', 'type': 'SweepJobLimits'}, - 'objective': {'key': 'objective', 'type': 'Objective'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'queue_settings': {'key': 'queueSettings', 'type': 'QueueSettings'}, - 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, - 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'SamplingAlgorithm'}, - 'search_space': {'key': 'searchSpace', 'type': 'object'}, - 'trial': {'key': 'trial', 'type': 'TrialComponent'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword component_id: ARM resource ID of the component resource. - :paramtype component_id: str - :keyword compute_id: ARM resource ID of the compute resource. - :paramtype compute_id: str - :keyword display_name: Display name of job. - :paramtype display_name: str - :keyword experiment_name: The name of the experiment the job belongs to. If not set, the job is - placed in the "Default" experiment. - :paramtype experiment_name: str - :keyword identity: Identity configuration. If set, this should be one of AmlToken, - ManagedIdentity, UserIdentity or null. - Defaults to AmlToken if null. - :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration - :keyword is_archived: Is the asset archived?. - :paramtype is_archived: bool - :keyword notification_setting: Notification setting for the job. - :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting - :keyword secrets_configuration: Configuration for secrets to be made available during runtime. - :paramtype secrets_configuration: dict[str, - ~azure.mgmt.machinelearningservices.models.SecretConfiguration] - :keyword services: List of JobEndpoints. - For local jobs, a job endpoint will have an endpoint value of FileStreamObject. 
- :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :keyword component_configuration: Component Configuration for sweep over component. - :paramtype component_configuration: - ~azure.mgmt.machinelearningservices.models.ComponentConfiguration - :keyword early_termination: Early termination policies enable canceling poor-performing runs - before they complete. - :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :keyword inputs: Mapping of input data bindings used in the job. - :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] - :keyword limits: Sweep Job limit. - :paramtype limits: ~azure.mgmt.machinelearningservices.models.SweepJobLimits - :keyword objective: Required. [Required] Optimization objective. - :paramtype objective: ~azure.mgmt.machinelearningservices.models.Objective - :keyword outputs: Mapping of output data bindings used in the job. - :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] - :keyword queue_settings: Queue settings for the job. - :paramtype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings - :keyword resources: Compute Resource configuration for the job. - :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration - :keyword sampling_algorithm: Required. [Required] The hyperparameter sampling algorithm. - :paramtype sampling_algorithm: ~azure.mgmt.machinelearningservices.models.SamplingAlgorithm - :keyword search_space: Required. [Required] A dictionary containing each parameter and its - distribution. The dictionary key is the name of the parameter. - :paramtype search_space: any - :keyword trial: Required. [Required] Trial component definition. - :paramtype trial: ~azure.mgmt.machinelearningservices.models.TrialComponent - """ - super(SweepJob, self).__init__(**kwargs) - self.job_type = 'Sweep' # type: str - self.component_configuration = kwargs.get('component_configuration', None) - self.early_termination = kwargs.get('early_termination', None) - self.inputs = kwargs.get('inputs', None) - self.limits = kwargs.get('limits', None) - self.objective = kwargs['objective'] - self.outputs = kwargs.get('outputs', None) - self.queue_settings = kwargs.get('queue_settings', None) - self.resources = kwargs.get('resources', None) - self.sampling_algorithm = kwargs['sampling_algorithm'] - self.search_space = kwargs['search_space'] - self.trial = kwargs['trial'] - - -class SweepJobLimits(JobLimits): - """Sweep Job limit class. - - All required parameters must be populated in order to send to Azure. - - :ivar job_limits_type: Required. [Required] JobLimit type.Constant filled by server. Possible - values include: "Command", "Sweep". - :vartype job_limits_type: str or ~azure.mgmt.machinelearningservices.models.JobLimitsType - :ivar timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. - Only supports duration with precision as low as Seconds. - :vartype timeout: ~datetime.timedelta - :ivar max_concurrent_trials: Sweep Job max concurrent trials. - :vartype max_concurrent_trials: int - :ivar max_total_trials: Sweep Job max total trials. - :vartype max_total_trials: int - :ivar trial_timeout: Sweep Job Trial timeout value. 
- :vartype trial_timeout: ~datetime.timedelta - """ - - _validation = { - 'job_limits_type': {'required': True}, - } - - _attribute_map = { - 'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, - 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, - 'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'}, - 'trial_timeout': {'key': 'trialTimeout', 'type': 'duration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword timeout: The max run duration in ISO 8601 format, after which the job will be - cancelled. Only supports duration with precision as low as Seconds. - :paramtype timeout: ~datetime.timedelta - :keyword max_concurrent_trials: Sweep Job max concurrent trials. - :paramtype max_concurrent_trials: int - :keyword max_total_trials: Sweep Job max total trials. - :paramtype max_total_trials: int - :keyword trial_timeout: Sweep Job Trial timeout value. - :paramtype trial_timeout: ~datetime.timedelta - """ - super(SweepJobLimits, self).__init__(**kwargs) - self.job_limits_type = 'Sweep' # type: str - self.max_concurrent_trials = kwargs.get('max_concurrent_trials', None) - self.max_total_trials = kwargs.get('max_total_trials', None) - self.trial_timeout = kwargs.get('trial_timeout', None) - - -class SynapseSpark(Compute): - """A SynapseSpark compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar compute_location: Location for the underlying compute. - :vartype compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar description: The description of the Machine Learning compute. - :vartype description: str - :ivar created_on: The time at which the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The time at which the compute was last modified. - :vartype modified_on: ~datetime.datetime - :ivar resource_id: ARM resource id of the underlying compute. - :vartype resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI - and AAD exclusively for authentication. 
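# Usage sketch (illustrative only; assumed _models namespace as above, numbers made up):
# SweepJobLimits via the removed kwargs-based constructor; job_limits_type is pinned to
# "Sweep" by the class itself, and both timeouts are plain timedeltas serialized as
# ISO 8601 durations.
import datetime

from azure.mgmt.machinelearningservices import models as _models  # assumed namespace

sweep_limits = _models.SweepJobLimits(
    max_total_trials=20,
    max_concurrent_trials=4,
    timeout=datetime.timedelta(hours=2),
    trial_timeout=datetime.timedelta(minutes=30),
)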
- :vartype disable_local_auth: bool - :ivar properties: - :vartype properties: ~azure.mgmt.machinelearningservices.models.SynapseSparkProperties - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'SynapseSparkProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword compute_location: Location for the underlying compute. - :paramtype compute_location: str - :keyword description: The description of the Machine Learning compute. - :paramtype description: str - :keyword resource_id: ARM resource id of the underlying compute. - :paramtype resource_id: str - :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only - MSI and AAD exclusively for authentication. - :paramtype disable_local_auth: bool - :keyword properties: - :paramtype properties: ~azure.mgmt.machinelearningservices.models.SynapseSparkProperties - """ - super(SynapseSpark, self).__init__(**kwargs) - self.compute_type = 'SynapseSpark' # type: str - self.properties = kwargs.get('properties', None) - - -class SynapseSparkProperties(msrest.serialization.Model): - """SynapseSparkProperties. - - :ivar auto_scale_properties: Auto scale properties. - :vartype auto_scale_properties: ~azure.mgmt.machinelearningservices.models.AutoScaleProperties - :ivar auto_pause_properties: Auto pause properties. - :vartype auto_pause_properties: ~azure.mgmt.machinelearningservices.models.AutoPauseProperties - :ivar spark_version: Spark version. - :vartype spark_version: str - :ivar node_count: The number of compute nodes currently assigned to the compute. - :vartype node_count: int - :ivar node_size: Node size. - :vartype node_size: str - :ivar node_size_family: Node size family. - :vartype node_size_family: str - :ivar subscription_id: Azure subscription identifier. - :vartype subscription_id: str - :ivar resource_group: Name of the resource group in which workspace is located. - :vartype resource_group: str - :ivar workspace_name: Name of Azure Machine Learning workspace. - :vartype workspace_name: str - :ivar pool_name: Pool name. 
- :vartype pool_name: str - """ - - _attribute_map = { - 'auto_scale_properties': {'key': 'autoScaleProperties', 'type': 'AutoScaleProperties'}, - 'auto_pause_properties': {'key': 'autoPauseProperties', 'type': 'AutoPauseProperties'}, - 'spark_version': {'key': 'sparkVersion', 'type': 'str'}, - 'node_count': {'key': 'nodeCount', 'type': 'int'}, - 'node_size': {'key': 'nodeSize', 'type': 'str'}, - 'node_size_family': {'key': 'nodeSizeFamily', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'workspace_name': {'key': 'workspaceName', 'type': 'str'}, - 'pool_name': {'key': 'poolName', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword auto_scale_properties: Auto scale properties. - :paramtype auto_scale_properties: - ~azure.mgmt.machinelearningservices.models.AutoScaleProperties - :keyword auto_pause_properties: Auto pause properties. - :paramtype auto_pause_properties: - ~azure.mgmt.machinelearningservices.models.AutoPauseProperties - :keyword spark_version: Spark version. - :paramtype spark_version: str - :keyword node_count: The number of compute nodes currently assigned to the compute. - :paramtype node_count: int - :keyword node_size: Node size. - :paramtype node_size: str - :keyword node_size_family: Node size family. - :paramtype node_size_family: str - :keyword subscription_id: Azure subscription identifier. - :paramtype subscription_id: str - :keyword resource_group: Name of the resource group in which workspace is located. - :paramtype resource_group: str - :keyword workspace_name: Name of Azure Machine Learning workspace. - :paramtype workspace_name: str - :keyword pool_name: Pool name. - :paramtype pool_name: str - """ - super(SynapseSparkProperties, self).__init__(**kwargs) - self.auto_scale_properties = kwargs.get('auto_scale_properties', None) - self.auto_pause_properties = kwargs.get('auto_pause_properties', None) - self.spark_version = kwargs.get('spark_version', None) - self.node_count = kwargs.get('node_count', None) - self.node_size = kwargs.get('node_size', None) - self.node_size_family = kwargs.get('node_size_family', None) - self.subscription_id = kwargs.get('subscription_id', None) - self.resource_group = kwargs.get('resource_group', None) - self.workspace_name = kwargs.get('workspace_name', None) - self.pool_name = kwargs.get('pool_name', None) - - -class SystemCreatedAcrAccount(msrest.serialization.Model): - """SystemCreatedAcrAccount. - - :ivar acr_account_name: Name of the ACR account. - :vartype acr_account_name: str - :ivar acr_account_sku: SKU of the ACR account. - :vartype acr_account_sku: str - :ivar arm_resource_id: This is populated once the ACR account is created. - :vartype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId - """ - - _attribute_map = { - 'acr_account_name': {'key': 'acrAccountName', 'type': 'str'}, - 'acr_account_sku': {'key': 'acrAccountSku', 'type': 'str'}, - 'arm_resource_id': {'key': 'armResourceId', 'type': 'ArmResourceId'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword acr_account_name: Name of the ACR account. - :paramtype acr_account_name: str - :keyword acr_account_sku: SKU of the ACR account. - :paramtype acr_account_sku: str - :keyword arm_resource_id: This is populated once the ACR account is created. 
- :paramtype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId - """ - super(SystemCreatedAcrAccount, self).__init__(**kwargs) - self.acr_account_name = kwargs.get('acr_account_name', None) - self.acr_account_sku = kwargs.get('acr_account_sku', None) - self.arm_resource_id = kwargs.get('arm_resource_id', None) - - -class SystemCreatedStorageAccount(msrest.serialization.Model): - """SystemCreatedStorageAccount. - - :ivar allow_blob_public_access: Public blob access allowed. - :vartype allow_blob_public_access: bool - :ivar arm_resource_id: This is populated once the storage account is created. - :vartype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId - :ivar storage_account_hns_enabled: HNS enabled for storage account. - :vartype storage_account_hns_enabled: bool - :ivar storage_account_name: Name of the storage account. - :vartype storage_account_name: str - :ivar storage_account_type: Allowed values: - "Standard_LRS", - "Standard_GRS", - "Standard_RAGRS", - "Standard_ZRS", - "Standard_GZRS", - "Standard_RAGZRS", - "Premium_LRS", - "Premium_ZRS". - :vartype storage_account_type: str - """ - - _attribute_map = { - 'allow_blob_public_access': {'key': 'allowBlobPublicAccess', 'type': 'bool'}, - 'arm_resource_id': {'key': 'armResourceId', 'type': 'ArmResourceId'}, - 'storage_account_hns_enabled': {'key': 'storageAccountHnsEnabled', 'type': 'bool'}, - 'storage_account_name': {'key': 'storageAccountName', 'type': 'str'}, - 'storage_account_type': {'key': 'storageAccountType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword allow_blob_public_access: Public blob access allowed. - :paramtype allow_blob_public_access: bool - :keyword arm_resource_id: This is populated once the storage account is created. - :paramtype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId - :keyword storage_account_hns_enabled: HNS enabled for storage account. - :paramtype storage_account_hns_enabled: bool - :keyword storage_account_name: Name of the storage account. - :paramtype storage_account_name: str - :keyword storage_account_type: Allowed values: - "Standard_LRS", - "Standard_GRS", - "Standard_RAGRS", - "Standard_ZRS", - "Standard_GZRS", - "Standard_RAGZRS", - "Premium_LRS", - "Premium_ZRS". - :paramtype storage_account_type: str - """ - super(SystemCreatedStorageAccount, self).__init__(**kwargs) - self.allow_blob_public_access = kwargs.get('allow_blob_public_access', None) - self.arm_resource_id = kwargs.get('arm_resource_id', None) - self.storage_account_hns_enabled = kwargs.get('storage_account_hns_enabled', None) - self.storage_account_name = kwargs.get('storage_account_name', None) - self.storage_account_type = kwargs.get('storage_account_type', None) - - -class SystemData(msrest.serialization.Model): - """Metadata pertaining to creation and last modification of the resource. - - :ivar created_by: The identity that created the resource. - :vartype created_by: str - :ivar created_by_type: The type of identity that created the resource. Possible values include: - "User", "Application", "ManagedIdentity", "Key". - :vartype created_by_type: str or ~azure.mgmt.machinelearningservices.models.CreatedByType - :ivar created_at: The timestamp of resource creation (UTC). - :vartype created_at: ~datetime.datetime - :ivar last_modified_by: The identity that last modified the resource. - :vartype last_modified_by: str - :ivar last_modified_by_type: The type of identity that last modified the resource. 
Possible - values include: "User", "Application", "ManagedIdentity", "Key". - :vartype last_modified_by_type: str or ~azure.mgmt.machinelearningservices.models.CreatedByType - :ivar last_modified_at: The timestamp of resource last modification (UTC). - :vartype last_modified_at: ~datetime.datetime - """ - - _attribute_map = { - 'created_by': {'key': 'createdBy', 'type': 'str'}, - 'created_by_type': {'key': 'createdByType', 'type': 'str'}, - 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, - 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, - 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, - 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword created_by: The identity that created the resource. - :paramtype created_by: str - :keyword created_by_type: The type of identity that created the resource. Possible values - include: "User", "Application", "ManagedIdentity", "Key". - :paramtype created_by_type: str or ~azure.mgmt.machinelearningservices.models.CreatedByType - :keyword created_at: The timestamp of resource creation (UTC). - :paramtype created_at: ~datetime.datetime - :keyword last_modified_by: The identity that last modified the resource. - :paramtype last_modified_by: str - :keyword last_modified_by_type: The type of identity that last modified the resource. Possible - values include: "User", "Application", "ManagedIdentity", "Key". - :paramtype last_modified_by_type: str or - ~azure.mgmt.machinelearningservices.models.CreatedByType - :keyword last_modified_at: The timestamp of resource last modification (UTC). - :paramtype last_modified_at: ~datetime.datetime - """ - super(SystemData, self).__init__(**kwargs) - self.created_by = kwargs.get('created_by', None) - self.created_by_type = kwargs.get('created_by_type', None) - self.created_at = kwargs.get('created_at', None) - self.last_modified_by = kwargs.get('last_modified_by', None) - self.last_modified_by_type = kwargs.get('last_modified_by_type', None) - self.last_modified_at = kwargs.get('last_modified_at', None) - - -class SystemService(msrest.serialization.Model): - """A system service running on a compute. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar system_service_type: The type of this system service. - :vartype system_service_type: str - :ivar public_ip_address: Public IP address. - :vartype public_ip_address: str - :ivar version: The version for this type. - :vartype version: str - """ - - _validation = { - 'system_service_type': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'version': {'readonly': True}, - } - - _attribute_map = { - 'system_service_type': {'key': 'systemServiceType', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(SystemService, self).__init__(**kwargs) - self.system_service_type = None - self.public_ip_address = None - self.version = None - - -class TableFixedParameters(msrest.serialization.Model): - """Fixed training parameters that won't be swept over during AutoML Table training. - - :ivar booster: Specify the boosting type, e.g gbdt for XGBoost. - :vartype booster: str - :ivar boosting_type: Specify the boosting type, e.g gbdt for LightGBM. 
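# Usage sketch (illustrative only; assumed _models namespace as above, placeholder values):
# the system-created registry account models take only primitive keyword arguments;
# arm_resource_id is omitted because it is populated once the account is created.
from azure.mgmt.machinelearningservices import models as _models  # assumed namespace

acr_account = _models.SystemCreatedAcrAccount(
    acr_account_name="<acr-account-name>",
    acr_account_sku="<acr-sku>",
)
storage_account = _models.SystemCreatedStorageAccount(
    storage_account_name="<storage-account-name>",
    storage_account_type="Standard_LRS",   # one of the allowed values listed above
    storage_account_hns_enabled=False,
    allow_blob_public_access=False,
)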
- :vartype boosting_type: str - :ivar grow_policy: Specify the grow policy, which controls the way new nodes are added to the - tree. - :vartype grow_policy: str - :ivar learning_rate: The learning rate for the training procedure. - :vartype learning_rate: float - :ivar max_bin: Specify the Maximum number of discrete bins to bucket continuous features . - :vartype max_bin: int - :ivar max_depth: Specify the max depth to limit the tree depth explicitly. - :vartype max_depth: int - :ivar max_leaves: Specify the max leaves to limit the tree leaves explicitly. - :vartype max_leaves: int - :ivar min_data_in_leaf: The minimum number of data per leaf. - :vartype min_data_in_leaf: int - :ivar min_split_gain: Minimum loss reduction required to make a further partition on a leaf - node of the tree. - :vartype min_split_gain: float - :ivar model_name: The name of the model to train. - :vartype model_name: str - :ivar n_estimators: Specify the number of trees (or rounds) in an model. - :vartype n_estimators: int - :ivar num_leaves: Specify the number of leaves. - :vartype num_leaves: int - :ivar preprocessor_name: The name of the preprocessor to use. - :vartype preprocessor_name: str - :ivar reg_alpha: L1 regularization term on weights. - :vartype reg_alpha: float - :ivar reg_lambda: L2 regularization term on weights. - :vartype reg_lambda: float - :ivar subsample: Subsample ratio of the training instance. - :vartype subsample: float - :ivar subsample_freq: Frequency of subsample. - :vartype subsample_freq: float - :ivar tree_method: Specify the tree method. - :vartype tree_method: str - :ivar with_mean: If true, center before scaling the data with StandardScalar. - :vartype with_mean: bool - :ivar with_std: If true, scaling the data with Unit Variance with StandardScalar. - :vartype with_std: bool - """ - - _attribute_map = { - 'booster': {'key': 'booster', 'type': 'str'}, - 'boosting_type': {'key': 'boostingType', 'type': 'str'}, - 'grow_policy': {'key': 'growPolicy', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'max_bin': {'key': 'maxBin', 'type': 'int'}, - 'max_depth': {'key': 'maxDepth', 'type': 'int'}, - 'max_leaves': {'key': 'maxLeaves', 'type': 'int'}, - 'min_data_in_leaf': {'key': 'minDataInLeaf', 'type': 'int'}, - 'min_split_gain': {'key': 'minSplitGain', 'type': 'float'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'n_estimators': {'key': 'nEstimators', 'type': 'int'}, - 'num_leaves': {'key': 'numLeaves', 'type': 'int'}, - 'preprocessor_name': {'key': 'preprocessorName', 'type': 'str'}, - 'reg_alpha': {'key': 'regAlpha', 'type': 'float'}, - 'reg_lambda': {'key': 'regLambda', 'type': 'float'}, - 'subsample': {'key': 'subsample', 'type': 'float'}, - 'subsample_freq': {'key': 'subsampleFreq', 'type': 'float'}, - 'tree_method': {'key': 'treeMethod', 'type': 'str'}, - 'with_mean': {'key': 'withMean', 'type': 'bool'}, - 'with_std': {'key': 'withStd', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword booster: Specify the boosting type, e.g gbdt for XGBoost. - :paramtype booster: str - :keyword boosting_type: Specify the boosting type, e.g gbdt for LightGBM. - :paramtype boosting_type: str - :keyword grow_policy: Specify the grow policy, which controls the way new nodes are added to - the tree. - :paramtype grow_policy: str - :keyword learning_rate: The learning rate for the training procedure. - :paramtype learning_rate: float - :keyword max_bin: Specify the Maximum number of discrete bins to bucket continuous features . 
- :paramtype max_bin: int - :keyword max_depth: Specify the max depth to limit the tree depth explicitly. - :paramtype max_depth: int - :keyword max_leaves: Specify the max leaves to limit the tree leaves explicitly. - :paramtype max_leaves: int - :keyword min_data_in_leaf: The minimum number of data per leaf. - :paramtype min_data_in_leaf: int - :keyword min_split_gain: Minimum loss reduction required to make a further partition on a leaf - node of the tree. - :paramtype min_split_gain: float - :keyword model_name: The name of the model to train. - :paramtype model_name: str - :keyword n_estimators: Specify the number of trees (or rounds) in an model. - :paramtype n_estimators: int - :keyword num_leaves: Specify the number of leaves. - :paramtype num_leaves: int - :keyword preprocessor_name: The name of the preprocessor to use. - :paramtype preprocessor_name: str - :keyword reg_alpha: L1 regularization term on weights. - :paramtype reg_alpha: float - :keyword reg_lambda: L2 regularization term on weights. - :paramtype reg_lambda: float - :keyword subsample: Subsample ratio of the training instance. - :paramtype subsample: float - :keyword subsample_freq: Frequency of subsample. - :paramtype subsample_freq: float - :keyword tree_method: Specify the tree method. - :paramtype tree_method: str - :keyword with_mean: If true, center before scaling the data with StandardScalar. - :paramtype with_mean: bool - :keyword with_std: If true, scaling the data with Unit Variance with StandardScalar. - :paramtype with_std: bool - """ - super(TableFixedParameters, self).__init__(**kwargs) - self.booster = kwargs.get('booster', None) - self.boosting_type = kwargs.get('boosting_type', None) - self.grow_policy = kwargs.get('grow_policy', None) - self.learning_rate = kwargs.get('learning_rate', None) - self.max_bin = kwargs.get('max_bin', None) - self.max_depth = kwargs.get('max_depth', None) - self.max_leaves = kwargs.get('max_leaves', None) - self.min_data_in_leaf = kwargs.get('min_data_in_leaf', None) - self.min_split_gain = kwargs.get('min_split_gain', None) - self.model_name = kwargs.get('model_name', None) - self.n_estimators = kwargs.get('n_estimators', None) - self.num_leaves = kwargs.get('num_leaves', None) - self.preprocessor_name = kwargs.get('preprocessor_name', None) - self.reg_alpha = kwargs.get('reg_alpha', None) - self.reg_lambda = kwargs.get('reg_lambda', None) - self.subsample = kwargs.get('subsample', None) - self.subsample_freq = kwargs.get('subsample_freq', None) - self.tree_method = kwargs.get('tree_method', None) - self.with_mean = kwargs.get('with_mean', False) - self.with_std = kwargs.get('with_std', False) - - -class TableParameterSubspace(msrest.serialization.Model): - """TableParameterSubspace. - - :ivar booster: Specify the boosting type, e.g gbdt for XGBoost. - :vartype booster: str - :ivar boosting_type: Specify the boosting type, e.g gbdt for LightGBM. - :vartype boosting_type: str - :ivar grow_policy: Specify the grow policy, which controls the way new nodes are added to the - tree. - :vartype grow_policy: str - :ivar learning_rate: The learning rate for the training procedure. - :vartype learning_rate: str - :ivar max_bin: Specify the Maximum number of discrete bins to bucket continuous features . - :vartype max_bin: str - :ivar max_depth: Specify the max depth to limit the tree depth explicitly. - :vartype max_depth: str - :ivar max_leaves: Specify the max leaves to limit the tree leaves explicitly. 
- :vartype max_leaves: str - :ivar min_data_in_leaf: The minimum number of data per leaf. - :vartype min_data_in_leaf: str - :ivar min_split_gain: Minimum loss reduction required to make a further partition on a leaf - node of the tree. - :vartype min_split_gain: str - :ivar model_name: The name of the model to train. - :vartype model_name: str - :ivar n_estimators: Specify the number of trees (or rounds) in an model. - :vartype n_estimators: str - :ivar num_leaves: Specify the number of leaves. - :vartype num_leaves: str - :ivar preprocessor_name: The name of the preprocessor to use. - :vartype preprocessor_name: str - :ivar reg_alpha: L1 regularization term on weights. - :vartype reg_alpha: str - :ivar reg_lambda: L2 regularization term on weights. - :vartype reg_lambda: str - :ivar subsample: Subsample ratio of the training instance. - :vartype subsample: str - :ivar subsample_freq: Frequency of subsample. - :vartype subsample_freq: str - :ivar tree_method: Specify the tree method. - :vartype tree_method: str - :ivar with_mean: If true, center before scaling the data with StandardScalar. - :vartype with_mean: str - :ivar with_std: If true, scaling the data with Unit Variance with StandardScalar. - :vartype with_std: str - """ - - _attribute_map = { - 'booster': {'key': 'booster', 'type': 'str'}, - 'boosting_type': {'key': 'boostingType', 'type': 'str'}, - 'grow_policy': {'key': 'growPolicy', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'max_bin': {'key': 'maxBin', 'type': 'str'}, - 'max_depth': {'key': 'maxDepth', 'type': 'str'}, - 'max_leaves': {'key': 'maxLeaves', 'type': 'str'}, - 'min_data_in_leaf': {'key': 'minDataInLeaf', 'type': 'str'}, - 'min_split_gain': {'key': 'minSplitGain', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'n_estimators': {'key': 'nEstimators', 'type': 'str'}, - 'num_leaves': {'key': 'numLeaves', 'type': 'str'}, - 'preprocessor_name': {'key': 'preprocessorName', 'type': 'str'}, - 'reg_alpha': {'key': 'regAlpha', 'type': 'str'}, - 'reg_lambda': {'key': 'regLambda', 'type': 'str'}, - 'subsample': {'key': 'subsample', 'type': 'str'}, - 'subsample_freq': {'key': 'subsampleFreq', 'type': 'str'}, - 'tree_method': {'key': 'treeMethod', 'type': 'str'}, - 'with_mean': {'key': 'withMean', 'type': 'str'}, - 'with_std': {'key': 'withStd', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword booster: Specify the boosting type, e.g gbdt for XGBoost. - :paramtype booster: str - :keyword boosting_type: Specify the boosting type, e.g gbdt for LightGBM. - :paramtype boosting_type: str - :keyword grow_policy: Specify the grow policy, which controls the way new nodes are added to - the tree. - :paramtype grow_policy: str - :keyword learning_rate: The learning rate for the training procedure. - :paramtype learning_rate: str - :keyword max_bin: Specify the Maximum number of discrete bins to bucket continuous features . - :paramtype max_bin: str - :keyword max_depth: Specify the max depth to limit the tree depth explicitly. - :paramtype max_depth: str - :keyword max_leaves: Specify the max leaves to limit the tree leaves explicitly. - :paramtype max_leaves: str - :keyword min_data_in_leaf: The minimum number of data per leaf. - :paramtype min_data_in_leaf: str - :keyword min_split_gain: Minimum loss reduction required to make a further partition on a leaf - node of the tree. - :paramtype min_split_gain: str - :keyword model_name: The name of the model to train. 
- :paramtype model_name: str - :keyword n_estimators: Specify the number of trees (or rounds) in an model. - :paramtype n_estimators: str - :keyword num_leaves: Specify the number of leaves. - :paramtype num_leaves: str - :keyword preprocessor_name: The name of the preprocessor to use. - :paramtype preprocessor_name: str - :keyword reg_alpha: L1 regularization term on weights. - :paramtype reg_alpha: str - :keyword reg_lambda: L2 regularization term on weights. - :paramtype reg_lambda: str - :keyword subsample: Subsample ratio of the training instance. - :paramtype subsample: str - :keyword subsample_freq: Frequency of subsample. - :paramtype subsample_freq: str - :keyword tree_method: Specify the tree method. - :paramtype tree_method: str - :keyword with_mean: If true, center before scaling the data with StandardScalar. - :paramtype with_mean: str - :keyword with_std: If true, scaling the data with Unit Variance with StandardScalar. - :paramtype with_std: str - """ - super(TableParameterSubspace, self).__init__(**kwargs) - self.booster = kwargs.get('booster', None) - self.boosting_type = kwargs.get('boosting_type', None) - self.grow_policy = kwargs.get('grow_policy', None) - self.learning_rate = kwargs.get('learning_rate', None) - self.max_bin = kwargs.get('max_bin', None) - self.max_depth = kwargs.get('max_depth', None) - self.max_leaves = kwargs.get('max_leaves', None) - self.min_data_in_leaf = kwargs.get('min_data_in_leaf', None) - self.min_split_gain = kwargs.get('min_split_gain', None) - self.model_name = kwargs.get('model_name', None) - self.n_estimators = kwargs.get('n_estimators', None) - self.num_leaves = kwargs.get('num_leaves', None) - self.preprocessor_name = kwargs.get('preprocessor_name', None) - self.reg_alpha = kwargs.get('reg_alpha', None) - self.reg_lambda = kwargs.get('reg_lambda', None) - self.subsample = kwargs.get('subsample', None) - self.subsample_freq = kwargs.get('subsample_freq', None) - self.tree_method = kwargs.get('tree_method', None) - self.with_mean = kwargs.get('with_mean', None) - self.with_std = kwargs.get('with_std', None) - - -class TableSweepSettings(msrest.serialization.Model): - """TableSweepSettings. - - All required parameters must be populated in order to send to Azure. - - :ivar early_termination: Type of early termination policy for the sweeping job. - :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :ivar sampling_algorithm: Required. [Required] Type of sampling algorithm. Possible values - include: "Grid", "Random", "Bayesian". - :vartype sampling_algorithm: str or - ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType - """ - - _validation = { - 'sampling_algorithm': {'required': True}, - } - - _attribute_map = { - 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'}, - 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword early_termination: Type of early termination policy for the sweeping job. - :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :keyword sampling_algorithm: Required. [Required] Type of sampling algorithm. Possible values - include: "Grid", "Random", "Bayesian". 
- :paramtype sampling_algorithm: str or - ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType - """ - super(TableSweepSettings, self).__init__(**kwargs) - self.early_termination = kwargs.get('early_termination', None) - self.sampling_algorithm = kwargs['sampling_algorithm'] - - -class TableVerticalFeaturizationSettings(FeaturizationSettings): - """Featurization Configuration. - - :ivar dataset_language: Dataset language, useful for the text data. - :vartype dataset_language: str - :ivar blocked_transformers: These transformers shall not be used in featurization. - :vartype blocked_transformers: list[str or - ~azure.mgmt.machinelearningservices.models.BlockedTransformers] - :ivar column_name_and_types: Dictionary of column name and its type (int, float, string, - datetime etc). - :vartype column_name_and_types: dict[str, str] - :ivar enable_dnn_featurization: Determines whether to use Dnn based featurizers for data - featurization. - :vartype enable_dnn_featurization: bool - :ivar mode: Featurization mode - User can keep the default 'Auto' mode and AutoML will take - care of necessary transformation of the data in featurization phase. - If 'Off' is selected then no featurization is done. - If 'Custom' is selected then user can specify additional inputs to customize how featurization - is done. Possible values include: "Auto", "Custom", "Off". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.FeaturizationMode - :ivar transformer_params: User can specify additional transformers to be used along with the - columns to which it would be applied and parameters for the transformer constructor. - :vartype transformer_params: dict[str, - list[~azure.mgmt.machinelearningservices.models.ColumnTransformer]] - """ - - _attribute_map = { - 'dataset_language': {'key': 'datasetLanguage', 'type': 'str'}, - 'blocked_transformers': {'key': 'blockedTransformers', 'type': '[str]'}, - 'column_name_and_types': {'key': 'columnNameAndTypes', 'type': '{str}'}, - 'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'transformer_params': {'key': 'transformerParams', 'type': '{[ColumnTransformer]}'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword dataset_language: Dataset language, useful for the text data. - :paramtype dataset_language: str - :keyword blocked_transformers: These transformers shall not be used in featurization. - :paramtype blocked_transformers: list[str or - ~azure.mgmt.machinelearningservices.models.BlockedTransformers] - :keyword column_name_and_types: Dictionary of column name and its type (int, float, string, - datetime etc). - :paramtype column_name_and_types: dict[str, str] - :keyword enable_dnn_featurization: Determines whether to use Dnn based featurizers for data - featurization. - :paramtype enable_dnn_featurization: bool - :keyword mode: Featurization mode - User can keep the default 'Auto' mode and AutoML will take - care of necessary transformation of the data in featurization phase. - If 'Off' is selected then no featurization is done. - If 'Custom' is selected then user can specify additional inputs to customize how featurization - is done. Possible values include: "Auto", "Custom", "Off". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.FeaturizationMode - :keyword transformer_params: User can specify additional transformers to be used along with the - columns to which it would be applied and parameters for the transformer constructor. 
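# Usage sketch (illustrative only; assumed _models namespace as above): TableSweepSettings
# needs only the sampling algorithm; early_termination is optional and omitted here.
from azure.mgmt.machinelearningservices import models as _models  # assumed namespace

table_sweep = _models.TableSweepSettings(
    sampling_algorithm="Random",   # documented values: "Grid", "Random", "Bayesian"
)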
- :paramtype transformer_params: dict[str, - list[~azure.mgmt.machinelearningservices.models.ColumnTransformer]] - """ - super(TableVerticalFeaturizationSettings, self).__init__(**kwargs) - self.blocked_transformers = kwargs.get('blocked_transformers', None) - self.column_name_and_types = kwargs.get('column_name_and_types', None) - self.enable_dnn_featurization = kwargs.get('enable_dnn_featurization', False) - self.mode = kwargs.get('mode', None) - self.transformer_params = kwargs.get('transformer_params', None) - - -class TableVerticalLimitSettings(msrest.serialization.Model): - """Job execution constraints. - - :ivar enable_early_termination: Enable early termination, determines whether or not if - AutoMLJob will terminate early if there is no score improvement in last 20 iterations. - :vartype enable_early_termination: bool - :ivar exit_score: Exit score for the AutoML job. - :vartype exit_score: float - :ivar max_concurrent_trials: Maximum Concurrent iterations. - :vartype max_concurrent_trials: int - :ivar max_cores_per_trial: Max cores per iteration. - :vartype max_cores_per_trial: int - :ivar max_nodes: Maximum nodes to use for the experiment. - :vartype max_nodes: int - :ivar max_trials: Number of iterations. - :vartype max_trials: int - :ivar sweep_concurrent_trials: Number of concurrent sweeping runs that user wants to trigger. - :vartype sweep_concurrent_trials: int - :ivar sweep_trials: Number of sweeping runs that user wants to trigger. - :vartype sweep_trials: int - :ivar timeout: AutoML job timeout. - :vartype timeout: ~datetime.timedelta - :ivar trial_timeout: Iteration timeout. - :vartype trial_timeout: ~datetime.timedelta - """ - - _attribute_map = { - 'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'}, - 'exit_score': {'key': 'exitScore', 'type': 'float'}, - 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, - 'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'}, - 'max_nodes': {'key': 'maxNodes', 'type': 'int'}, - 'max_trials': {'key': 'maxTrials', 'type': 'int'}, - 'sweep_concurrent_trials': {'key': 'sweepConcurrentTrials', 'type': 'int'}, - 'sweep_trials': {'key': 'sweepTrials', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, - 'trial_timeout': {'key': 'trialTimeout', 'type': 'duration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword enable_early_termination: Enable early termination, determines whether or not if - AutoMLJob will terminate early if there is no score improvement in last 20 iterations. - :paramtype enable_early_termination: bool - :keyword exit_score: Exit score for the AutoML job. - :paramtype exit_score: float - :keyword max_concurrent_trials: Maximum Concurrent iterations. - :paramtype max_concurrent_trials: int - :keyword max_cores_per_trial: Max cores per iteration. - :paramtype max_cores_per_trial: int - :keyword max_nodes: Maximum nodes to use for the experiment. - :paramtype max_nodes: int - :keyword max_trials: Number of iterations. - :paramtype max_trials: int - :keyword sweep_concurrent_trials: Number of concurrent sweeping runs that user wants to - trigger. - :paramtype sweep_concurrent_trials: int - :keyword sweep_trials: Number of sweeping runs that user wants to trigger. - :paramtype sweep_trials: int - :keyword timeout: AutoML job timeout. - :paramtype timeout: ~datetime.timedelta - :keyword trial_timeout: Iteration timeout. 
- :paramtype trial_timeout: ~datetime.timedelta
- """
- super(TableVerticalLimitSettings, self).__init__(**kwargs)
- self.enable_early_termination = kwargs.get('enable_early_termination', True)
- self.exit_score = kwargs.get('exit_score', None)
- self.max_concurrent_trials = kwargs.get('max_concurrent_trials', 1)
- self.max_cores_per_trial = kwargs.get('max_cores_per_trial', -1)
- self.max_nodes = kwargs.get('max_nodes', 1)
- self.max_trials = kwargs.get('max_trials', 1000)
- self.sweep_concurrent_trials = kwargs.get('sweep_concurrent_trials', 0)
- self.sweep_trials = kwargs.get('sweep_trials', 0)
- self.timeout = kwargs.get('timeout', "PT6H")
- self.trial_timeout = kwargs.get('trial_timeout', "PT30M")
-
-
-class TargetUtilizationScaleSettings(OnlineScaleSettings):
- """TargetUtilizationScaleSettings.
-
- All required parameters must be populated in order to send to Azure.
-
- :ivar scale_type: Required. [Required] Type of deployment scaling algorithm.Constant filled by
- server. Possible values include: "Default", "TargetUtilization".
- :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.ScaleType
- :ivar max_instances: The maximum number of instances that the deployment can scale to. The
- quota will be reserved for max_instances.
- :vartype max_instances: int
- :ivar min_instances: The minimum number of instances to always be present.
- :vartype min_instances: int
- :ivar polling_interval: The polling interval in ISO 8601 format. Only supports duration with
- precision as low as seconds.
- :vartype polling_interval: ~datetime.timedelta
- :ivar target_utilization_percentage: Target CPU usage for the autoscaler.
- :vartype target_utilization_percentage: int
- """
-
- _validation = {
- 'scale_type': {'required': True},
- }
-
- _attribute_map = {
- 'scale_type': {'key': 'scaleType', 'type': 'str'},
- 'max_instances': {'key': 'maxInstances', 'type': 'int'},
- 'min_instances': {'key': 'minInstances', 'type': 'int'},
- 'polling_interval': {'key': 'pollingInterval', 'type': 'duration'},
- 'target_utilization_percentage': {'key': 'targetUtilizationPercentage', 'type': 'int'},
- }
-
- def __init__(
- self,
- **kwargs
- ):
- """
- :keyword max_instances: The maximum number of instances that the deployment can scale to. The
- quota will be reserved for max_instances.
- :paramtype max_instances: int
- :keyword min_instances: The minimum number of instances to always be present.
- :paramtype min_instances: int
- :keyword polling_interval: The polling interval in ISO 8601 format. Only supports duration with
- precision as low as seconds.
- :paramtype polling_interval: ~datetime.timedelta
- :keyword target_utilization_percentage: Target CPU usage for the autoscaler.
- :paramtype target_utilization_percentage: int
- """
- super(TargetUtilizationScaleSettings, self).__init__(**kwargs)
- self.scale_type = 'TargetUtilization' # type: str
- self.max_instances = kwargs.get('max_instances', 1)
- self.min_instances = kwargs.get('min_instances', 1)
- self.polling_interval = kwargs.get('polling_interval', "PT1S")
- self.target_utilization_percentage = kwargs.get('target_utilization_percentage', 70)
-
-
-class TensorFlow(DistributionConfiguration):
- """TensorFlow distribution configuration.
-
- All required parameters must be populated in order to send to Azure.
-
- :ivar distribution_type: Required. [Required] Specifies the type of distribution
- framework.Constant filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi",
- "Ray".
- :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType - :ivar parameter_server_count: Number of parameter server tasks. - :vartype parameter_server_count: int - :ivar worker_count: Number of workers. If not specified, will default to the instance count. - :vartype worker_count: int - """ - - _validation = { - 'distribution_type': {'required': True}, - } - - _attribute_map = { - 'distribution_type': {'key': 'distributionType', 'type': 'str'}, - 'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'}, - 'worker_count': {'key': 'workerCount', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword parameter_server_count: Number of parameter server tasks. - :paramtype parameter_server_count: int - :keyword worker_count: Number of workers. If not specified, will default to the instance count. - :paramtype worker_count: int - """ - super(TensorFlow, self).__init__(**kwargs) - self.distribution_type = 'TensorFlow' # type: str - self.parameter_server_count = kwargs.get('parameter_server_count', 0) - self.worker_count = kwargs.get('worker_count', None) - - -class TextClassification(AutoMLVertical, NlpVertical): - """Text Classification task in AutoML NLP vertical. -NLP - Natural Language Processing. - - All required parameters must be populated in order to send to Azure. - - :ivar featurization_settings: Featurization inputs needed for AutoML job. - :vartype featurization_settings: - ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings - :ivar fixed_parameters: Model/training parameters that will remain constant throughout - training. - :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters - :ivar limit_settings: Execution constraints for AutoMLJob. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] - :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric for Text-Classification task. 
Possible values include: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted". - :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics - """ - - _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, - } - - _attribute_map = { - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'NlpFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, - 'search_space': {'key': 'searchSpace', 'type': '[NlpParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'NlpSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword featurization_settings: Featurization inputs needed for AutoML job. - :paramtype featurization_settings: - ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings - :keyword fixed_parameters: Model/training parameters that will remain constant throughout - training. - :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters - :keyword limit_settings: Execution constraints for AutoMLJob. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] - :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword primary_metric: Primary metric for Text-Classification task. Possible values include: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted". 
- :paramtype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics - """ - super(TextClassification, self).__init__(**kwargs) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.task_type = 'TextClassification' # type: str - self.primary_metric = kwargs.get('primary_metric', None) - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] - - -class TextClassificationMultilabel(AutoMLVertical, NlpVertical): - """Text Classification Multilabel task in AutoML NLP vertical. -NLP - Natural Language Processing. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar featurization_settings: Featurization inputs needed for AutoML job. - :vartype featurization_settings: - ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings - :ivar fixed_parameters: Model/training parameters that will remain constant throughout - training. - :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters - :ivar limit_settings: Execution constraints for AutoMLJob. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] - :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric for Text-Classification-Multilabel task. - Currently only Accuracy is supported as primary metric, hence user need not set it explicitly. - Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall", - "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "IOU". 
- :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics - """ - - _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, - 'primary_metric': {'readonly': True}, - } - - _attribute_map = { - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'NlpFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, - 'search_space': {'key': 'searchSpace', 'type': '[NlpParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'NlpSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword featurization_settings: Featurization inputs needed for AutoML job. - :paramtype featurization_settings: - ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings - :keyword fixed_parameters: Model/training parameters that will remain constant throughout - training. - :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters - :keyword limit_settings: Execution constraints for AutoMLJob. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] - :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. 
- :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - """ - super(TextClassificationMultilabel, self).__init__(**kwargs) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.task_type = 'TextClassificationMultilabel' # type: str - self.primary_metric = None - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] - - -class TextNer(AutoMLVertical, NlpVertical): - """Text-NER task in AutoML NLP vertical. -NER - Named Entity Recognition. -NLP - Natural Language Processing. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar featurization_settings: Featurization inputs needed for AutoML job. - :vartype featurization_settings: - ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings - :ivar fixed_parameters: Model/training parameters that will remain constant throughout - training. - :vartype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters - :ivar limit_settings: Execution constraints for AutoMLJob. - :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings - :ivar search_space: Search space for sampling different combinations of models and their - hyperparameters. - :vartype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] - :ivar sweep_settings: Settings for model sweeping and hyperparameter tuning. - :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings - :ivar validation_data: Validation data inputs. - :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric for Text-NER task. - Only 'Accuracy' is supported for Text-NER, so user need not set this explicitly. Possible - values include: "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted". 
- :vartype primary_metric: str or - ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics - """ - - _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, - 'primary_metric': {'readonly': True}, - } - - _attribute_map = { - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'NlpFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, - 'search_space': {'key': 'searchSpace', 'type': '[NlpParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'NlpSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword featurization_settings: Featurization inputs needed for AutoML job. - :paramtype featurization_settings: - ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings - :keyword fixed_parameters: Model/training parameters that will remain constant throughout - training. - :paramtype fixed_parameters: ~azure.mgmt.machinelearningservices.models.NlpFixedParameters - :keyword limit_settings: Execution constraints for AutoMLJob. - :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalLimitSettings - :keyword search_space: Search space for sampling different combinations of models and their - hyperparameters. - :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.NlpParameterSubspace] - :keyword sweep_settings: Settings for model sweeping and hyperparameter tuning. - :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings - :keyword validation_data: Validation data inputs. - :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - """ - super(TextNer, self).__init__(**kwargs) - self.featurization_settings = kwargs.get('featurization_settings', None) - self.fixed_parameters = kwargs.get('fixed_parameters', None) - self.limit_settings = kwargs.get('limit_settings', None) - self.search_space = kwargs.get('search_space', None) - self.sweep_settings = kwargs.get('sweep_settings', None) - self.validation_data = kwargs.get('validation_data', None) - self.task_type = 'TextNER' # type: str - self.primary_metric = None - self.log_verbosity = kwargs.get('log_verbosity', None) - self.target_column_name = kwargs.get('target_column_name', None) - self.training_data = kwargs['training_data'] - - -class TmpfsOptions(msrest.serialization.Model): - """TmpfsOptions. - - :ivar size: Mention the Tmpfs size. 
- :vartype size: int - """ - - _attribute_map = { - 'size': {'key': 'size', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword size: Mention the Tmpfs size. - :paramtype size: int - """ - super(TmpfsOptions, self).__init__(**kwargs) - self.size = kwargs.get('size', None) - - -class TopNFeaturesByAttribution(MonitoringFeatureFilterBase): - """TopNFeaturesByAttribution. - - All required parameters must be populated in order to send to Azure. - - :ivar filter_type: Required. [Required] Specifies the feature filter to leverage when selecting - features to calculate metrics over.Constant filled by server. Possible values include: - "AllFeatures", "TopNByAttribution", "FeatureSubset". - :vartype filter_type: str or - ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterType - :ivar top: The number of top features to include. - :vartype top: int - """ - - _validation = { - 'filter_type': {'required': True}, - } - - _attribute_map = { - 'filter_type': {'key': 'filterType', 'type': 'str'}, - 'top': {'key': 'top', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword top: The number of top features to include. - :paramtype top: int - """ - super(TopNFeaturesByAttribution, self).__init__(**kwargs) - self.filter_type = 'TopNByAttribution' # type: str - self.top = kwargs.get('top', 10) - - -class TrailingInputData(MonitoringInputDataBase): - """Trailing input data definition. - - All required parameters must be populated in order to send to Azure. - - :ivar columns: Mapping of column names to special uses. - :vartype columns: dict[str, str] - :ivar data_context: The context metadata of the data source. - :vartype data_context: str - :ivar input_data_type: Required. [Required] Specifies the type of signal to monitor.Constant - filled by server. Possible values include: "Static", "Trailing", "Fixed". - :vartype input_data_type: str or - ~azure.mgmt.machinelearningservices.models.MonitoringInputDataType - :ivar job_input_type: Required. [Required] Specifies the type of job. Possible values include: - "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", "triton_model". - :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str - :ivar preprocessing_component_id: The ARM resource ID of the component resource used to - preprocess the data. - :vartype preprocessing_component_id: str - :ivar window_offset: Required. [Required] The time offset between the end of the data window - and the monitor's current run time. - :vartype window_offset: ~datetime.timedelta - :ivar window_size: Required. [Required] The size of the trailing data window. 
- :vartype window_size: ~datetime.timedelta - """ - - _validation = { - 'input_data_type': {'required': True}, - 'job_input_type': {'required': True}, - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'window_offset': {'required': True}, - 'window_size': {'required': True}, - } - - _attribute_map = { - 'columns': {'key': 'columns', 'type': '{str}'}, - 'data_context': {'key': 'dataContext', 'type': 'str'}, - 'input_data_type': {'key': 'inputDataType', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'preprocessing_component_id': {'key': 'preprocessingComponentId', 'type': 'str'}, - 'window_offset': {'key': 'windowOffset', 'type': 'duration'}, - 'window_size': {'key': 'windowSize', 'type': 'duration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword columns: Mapping of column names to special uses. - :paramtype columns: dict[str, str] - :keyword data_context: The context metadata of the data source. - :paramtype data_context: str - :keyword job_input_type: Required. [Required] Specifies the type of job. Possible values - include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", - "triton_model". - :paramtype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str - :keyword preprocessing_component_id: The ARM resource ID of the component resource used to - preprocess the data. - :paramtype preprocessing_component_id: str - :keyword window_offset: Required. [Required] The time offset between the end of the data window - and the monitor's current run time. - :paramtype window_offset: ~datetime.timedelta - :keyword window_size: Required. [Required] The size of the trailing data window. - :paramtype window_size: ~datetime.timedelta - """ - super(TrailingInputData, self).__init__(**kwargs) - self.input_data_type = 'Trailing' # type: str - self.preprocessing_component_id = kwargs.get('preprocessing_component_id', None) - self.window_offset = kwargs['window_offset'] - self.window_size = kwargs['window_size'] - - -class TrialComponent(msrest.serialization.Model): - """Trial component definition. - - All required parameters must be populated in order to send to Azure. - - :ivar code_id: ARM resource ID of the code asset. - :vartype code_id: str - :ivar command: Required. [Required] The command to execute on startup of the job. eg. "python - train.py". - :vartype command: str - :ivar distribution: Distribution configuration of the job. If set, this should be one of Mpi, - Tensorflow, PyTorch, or null. - :vartype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration - :ivar environment_id: Required. [Required] The ARM resource ID of the Environment specification - for the job. - :vartype environment_id: str - :ivar environment_variables: Environment variables included in the job. - :vartype environment_variables: dict[str, str] - :ivar resources: Compute Resource configuration for the job. 
- :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration - """ - - _validation = { - 'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'environment_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'code_id': {'key': 'codeId', 'type': 'str'}, - 'command': {'key': 'command', 'type': 'str'}, - 'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword code_id: ARM resource ID of the code asset. - :paramtype code_id: str - :keyword command: Required. [Required] The command to execute on startup of the job. eg. - "python train.py". - :paramtype command: str - :keyword distribution: Distribution configuration of the job. If set, this should be one of - Mpi, Tensorflow, PyTorch, or null. - :paramtype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration - :keyword environment_id: Required. [Required] The ARM resource ID of the Environment - specification for the job. - :paramtype environment_id: str - :keyword environment_variables: Environment variables included in the job. - :paramtype environment_variables: dict[str, str] - :keyword resources: Compute Resource configuration for the job. - :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration - """ - super(TrialComponent, self).__init__(**kwargs) - self.code_id = kwargs.get('code_id', None) - self.command = kwargs['command'] - self.distribution = kwargs.get('distribution', None) - self.environment_id = kwargs['environment_id'] - self.environment_variables = kwargs.get('environment_variables', None) - self.resources = kwargs.get('resources', None) - - -class TritonInferencingServer(InferencingServer): - """Triton inferencing server configurations. - - All required parameters must be populated in order to send to Azure. - - :ivar server_type: Required. [Required] Inferencing server type for various targets.Constant - filled by server. Possible values include: "AzureMLOnline", "AzureMLBatch", "Triton", "Custom". - :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType - :ivar inference_configuration: Inference configuration for Triton. - :vartype inference_configuration: - ~azure.mgmt.machinelearningservices.models.OnlineInferenceConfiguration - """ - - _validation = { - 'server_type': {'required': True}, - } - - _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, - 'inference_configuration': {'key': 'inferenceConfiguration', 'type': 'OnlineInferenceConfiguration'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword inference_configuration: Inference configuration for Triton. - :paramtype inference_configuration: - ~azure.mgmt.machinelearningservices.models.OnlineInferenceConfiguration - """ - super(TritonInferencingServer, self).__init__(**kwargs) - self.server_type = 'Triton' # type: str - self.inference_configuration = kwargs.get('inference_configuration', None) - - -class TritonModelJobInput(JobInput, AssetJobInput): - """TritonModelJobInput. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Input Asset Delivery Mode. 
Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str - :ivar description: Description for the input. - :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - """ - - _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str - :keyword description: Description for the input. - :paramtype description: str - """ - super(TritonModelJobInput, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.uri = kwargs['uri'] - self.job_input_type = 'triton_model' # type: str - self.description = kwargs.get('description', None) - - -class TritonModelJobOutput(JobOutput, AssetJobOutput): - """TritonModelJobOutput. - - All required parameters must be populated in order to send to Azure. - - :ivar asset_name: Output Asset Name. - :vartype asset_name: str - :ivar asset_version: Output Asset Version. - :vartype asset_version: str - :ivar auto_delete_setting: Auto delete setting of output data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", - "Direct". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :ivar uri: Output Asset URI. - :vartype uri: str - :ivar description: Description for the output. - :vartype description: str - :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by - server. Possible values include: "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType - """ - - _validation = { - 'job_output_type': {'required': True}, - } - - _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword asset_name: Output Asset Name. - :paramtype asset_name: str - :keyword asset_version: Output Asset Version. 
- :paramtype asset_version: str - :keyword auto_delete_setting: Auto delete setting of output data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", - "Direct". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :keyword uri: Output Asset URI. - :paramtype uri: str - :keyword description: Description for the output. - :paramtype description: str - """ - super(TritonModelJobOutput, self).__init__(**kwargs) - self.asset_name = kwargs.get('asset_name', None) - self.asset_version = kwargs.get('asset_version', None) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.mode = kwargs.get('mode', None) - self.uri = kwargs.get('uri', None) - self.job_output_type = 'triton_model' # type: str - self.description = kwargs.get('description', None) - - -class TruncationSelectionPolicy(EarlyTerminationPolicy): - """Defines an early termination policy that cancels a given percentage of runs at each evaluation interval. - - All required parameters must be populated in order to send to Azure. - - :ivar delay_evaluation: Number of intervals by which to delay the first evaluation. - :vartype delay_evaluation: int - :ivar evaluation_interval: Interval (number of runs) between policy evaluations. - :vartype evaluation_interval: int - :ivar policy_type: Required. [Required] Name of policy configuration.Constant filled by server. - Possible values include: "Bandit", "MedianStopping", "TruncationSelection". - :vartype policy_type: str or - ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType - :ivar truncation_percentage: The percentage of runs to cancel at each evaluation interval. - :vartype truncation_percentage: int - """ - - _validation = { - 'policy_type': {'required': True}, - } - - _attribute_map = { - 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, - 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, - 'policy_type': {'key': 'policyType', 'type': 'str'}, - 'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. - :paramtype delay_evaluation: int - :keyword evaluation_interval: Interval (number of runs) between policy evaluations. - :paramtype evaluation_interval: int - :keyword truncation_percentage: The percentage of runs to cancel at each evaluation interval. - :paramtype truncation_percentage: int - """ - super(TruncationSelectionPolicy, self).__init__(**kwargs) - self.policy_type = 'TruncationSelection' # type: str - self.truncation_percentage = kwargs.get('truncation_percentage', 0) - - -class UpdateWorkspaceQuotas(msrest.serialization.Model): - """The properties for update Quota response. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar type: Specifies the resource type. - :vartype type: str - :ivar limit: The maximum permitted quota of the resource. - :vartype limit: long - :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". - :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit - :ivar status: Status of update workspace quota. 
Possible values include: "Undefined", - "Success", "Failure", "InvalidQuotaBelowClusterMinimum", - "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku", - "OperationNotEnabledForRegion". - :vartype status: str or ~azure.mgmt.machinelearningservices.models.Status - """ - - _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'unit': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword limit: The maximum permitted quota of the resource. - :paramtype limit: long - :keyword status: Status of update workspace quota. Possible values include: "Undefined", - "Success", "Failure", "InvalidQuotaBelowClusterMinimum", - "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku", - "OperationNotEnabledForRegion". - :paramtype status: str or ~azure.mgmt.machinelearningservices.models.Status - """ - super(UpdateWorkspaceQuotas, self).__init__(**kwargs) - self.id = None - self.type = None - self.limit = kwargs.get('limit', None) - self.unit = None - self.status = kwargs.get('status', None) - - -class UpdateWorkspaceQuotasResult(msrest.serialization.Model): - """The result of update workspace quota. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The list of workspace quota update result. - :vartype value: list[~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotas] - :ivar next_link: The URI to fetch the next page of workspace quota update result. Call - ListNext() with this to fetch the next page of Workspace Quota update result. - :vartype next_link: str - """ - - _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs) - self.value = None - self.next_link = None - - -class UriFileDataVersion(DataVersionBaseProperties): - """uri-file data version entity. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :vartype is_archived: bool - :ivar data_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "uri_file", "uri_folder", "mltable". 
- :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType - :ivar data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. - :vartype data_uri: str - :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual - Property. - :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar stage: Stage in the data lifecycle assigned to this data asset. - :vartype stage: str - """ - - _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 'stage', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :paramtype is_archived: bool - :keyword data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. - :paramtype data_uri: str - :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual - Property. - :paramtype intellectual_property: - ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword stage: Stage in the data lifecycle assigned to this data asset. - :paramtype stage: str - """ - super(UriFileDataVersion, self).__init__(**kwargs) - self.data_type = 'uri_file' # type: str - - -class UriFileJobInput(JobInput, AssetJobInput): - """UriFileJobInput. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str - :ivar description: Description for the input. - :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". 
- :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - """ - - _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str - :keyword description: Description for the input. - :paramtype description: str - """ - super(UriFileJobInput, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.uri = kwargs['uri'] - self.job_input_type = 'uri_file' # type: str - self.description = kwargs.get('description', None) - - -class UriFileJobOutput(JobOutput, AssetJobOutput): - """UriFileJobOutput. - - All required parameters must be populated in order to send to Azure. - - :ivar asset_name: Output Asset Name. - :vartype asset_name: str - :ivar asset_version: Output Asset Version. - :vartype asset_version: str - :ivar auto_delete_setting: Auto delete setting of output data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", - "Direct". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :ivar uri: Output Asset URI. - :vartype uri: str - :ivar description: Description for the output. - :vartype description: str - :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by - server. Possible values include: "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType - """ - - _validation = { - 'job_output_type': {'required': True}, - } - - _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword asset_name: Output Asset Name. - :paramtype asset_name: str - :keyword asset_version: Output Asset Version. - :paramtype asset_version: str - :keyword auto_delete_setting: Auto delete setting of output data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", - "Direct". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :keyword uri: Output Asset URI. - :paramtype uri: str - :keyword description: Description for the output. 
- :paramtype description: str - """ - super(UriFileJobOutput, self).__init__(**kwargs) - self.asset_name = kwargs.get('asset_name', None) - self.asset_version = kwargs.get('asset_version', None) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.mode = kwargs.get('mode', None) - self.uri = kwargs.get('uri', None) - self.job_output_type = 'uri_file' # type: str - self.description = kwargs.get('description', None) - - -class UriFolderDataVersion(DataVersionBaseProperties): - """uri-folder data version entity. - - All required parameters must be populated in order to send to Azure. - - :ivar description: The asset description text. - :vartype description: str - :ivar properties: The asset property dictionary. - :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :vartype tags: dict[str, str] - :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar is_anonymous: If the name version are system generated (anonymous registration). For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :vartype is_anonymous: bool - :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :vartype is_archived: bool - :ivar data_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "uri_file", "uri_folder", "mltable". - :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType - :ivar data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. - :vartype data_uri: str - :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual - Property. - :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar stage: Stage in the data lifecycle assigned to this data asset. - :vartype stage: str - """ - - _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 'stage', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword description: The asset description text. - :paramtype description: str - :keyword properties: The asset property dictionary. - :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. - :paramtype tags: dict[str, str] - :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword is_anonymous: If the name version are system generated (anonymous registration). 
For - types where Stage is defined, when Stage is provided it will be used to populate IsAnonymous. - :paramtype is_anonymous: bool - :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is - provided it will be used to populate IsArchived. - :paramtype is_archived: bool - :keyword data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. - :paramtype data_uri: str - :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual - Property. - :paramtype intellectual_property: - ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword stage: Stage in the data lifecycle assigned to this data asset. - :paramtype stage: str - """ - super(UriFolderDataVersion, self).__init__(**kwargs) - self.data_type = 'uri_folder' # type: str - - -class UriFolderJobInput(JobInput, AssetJobInput): - """UriFolderJobInput. - - All required parameters must be populated in order to send to Azure. - - :ivar mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str - :ivar description: Description for the input. - :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - """ - - _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, - } - - _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str - :keyword description: Description for the input. - :paramtype description: str - """ - super(UriFolderJobInput, self).__init__(**kwargs) - self.mode = kwargs.get('mode', None) - self.uri = kwargs['uri'] - self.job_input_type = 'uri_folder' # type: str - self.description = kwargs.get('description', None) - - -class UriFolderJobOutput(JobOutput, AssetJobOutput): - """UriFolderJobOutput. - - All required parameters must be populated in order to send to Azure. - - :ivar asset_name: Output Asset Name. - :vartype asset_name: str - :ivar asset_version: Output Asset Version. - :vartype asset_version: str - :ivar auto_delete_setting: Auto delete setting of output data asset. - :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", - "Direct". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :ivar uri: Output Asset URI. - :vartype uri: str - :ivar description: Description for the output. 
- :vartype description: str - :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by - server. Possible values include: "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType - """ - - _validation = { - 'job_output_type': {'required': True}, - } - - _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword asset_name: Output Asset Name. - :paramtype asset_name: str - :keyword asset_version: Output Asset Version. - :paramtype asset_version: str - :keyword auto_delete_setting: Auto delete setting of output data asset. - :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", - "Direct". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode - :keyword uri: Output Asset URI. - :paramtype uri: str - :keyword description: Description for the output. - :paramtype description: str - """ - super(UriFolderJobOutput, self).__init__(**kwargs) - self.asset_name = kwargs.get('asset_name', None) - self.asset_version = kwargs.get('asset_version', None) - self.auto_delete_setting = kwargs.get('auto_delete_setting', None) - self.mode = kwargs.get('mode', None) - self.uri = kwargs.get('uri', None) - self.job_output_type = 'uri_folder' # type: str - self.description = kwargs.get('description', None) - - -class Usage(msrest.serialization.Model): - """Describes AML Resource Usage. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Specifies the resource ID. - :vartype id: str - :ivar aml_workspace_location: Region of the AML workspace in the id. - :vartype aml_workspace_location: str - :ivar type: Specifies the resource type. - :vartype type: str - :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count". - :vartype unit: str or ~azure.mgmt.machinelearningservices.models.UsageUnit - :ivar current_value: The current usage of the resource. - :vartype current_value: long - :ivar limit: The maximum permitted usage of the resource. - :vartype limit: long - :ivar name: The name of the type of usage. 
- :vartype name: ~azure.mgmt.machinelearningservices.models.UsageName - """ - - _validation = { - 'id': {'readonly': True}, - 'aml_workspace_location': {'readonly': True}, - 'type': {'readonly': True}, - 'unit': {'readonly': True}, - 'current_value': {'readonly': True}, - 'limit': {'readonly': True}, - 'name': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'current_value': {'key': 'currentValue', 'type': 'long'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'UsageName'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(Usage, self).__init__(**kwargs) - self.id = None - self.aml_workspace_location = None - self.type = None - self.unit = None - self.current_value = None - self.limit = None - self.name = None - - -class UsageName(msrest.serialization.Model): - """The Usage Names. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar value: The name of the resource. - :vartype value: str - :ivar localized_value: The localized name of the resource. - :vartype localized_value: str - """ - - _validation = { - 'value': {'readonly': True}, - 'localized_value': {'readonly': True}, - } - - _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'localized_value': {'key': 'localizedValue', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(UsageName, self).__init__(**kwargs) - self.value = None - self.localized_value = None - - -class UserAccountCredentials(msrest.serialization.Model): - """Settings for user account that gets created on each on the nodes of a compute. - - All required parameters must be populated in order to send to Azure. - - :ivar admin_user_name: Required. Name of the administrator user account which can be used to - SSH to nodes. - :vartype admin_user_name: str - :ivar admin_user_ssh_public_key: SSH public key of the administrator user account. - :vartype admin_user_ssh_public_key: str - :ivar admin_user_password: Password of the administrator user account. - :vartype admin_user_password: str - """ - - _validation = { - 'admin_user_name': {'required': True}, - } - - _attribute_map = { - 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, - 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'}, - 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword admin_user_name: Required. Name of the administrator user account which can be used to - SSH to nodes. - :paramtype admin_user_name: str - :keyword admin_user_ssh_public_key: SSH public key of the administrator user account. - :paramtype admin_user_ssh_public_key: str - :keyword admin_user_password: Password of the administrator user account. - :paramtype admin_user_password: str - """ - super(UserAccountCredentials, self).__init__(**kwargs) - self.admin_user_name = kwargs['admin_user_name'] - self.admin_user_ssh_public_key = kwargs.get('admin_user_ssh_public_key', None) - self.admin_user_password = kwargs.get('admin_user_password', None) - - -class UserAssignedIdentity(msrest.serialization.Model): - """User assigned identity properties. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar principal_id: The principal ID of the assigned identity. 
- :vartype principal_id: str - :ivar client_id: The client ID of the assigned identity. - :vartype client_id: str - """ - - _validation = { - 'principal_id': {'readonly': True}, - 'client_id': {'readonly': True}, - } - - _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(UserAssignedIdentity, self).__init__(**kwargs) - self.principal_id = None - self.client_id = None - - -class UserCreatedAcrAccount(msrest.serialization.Model): - """UserCreatedAcrAccount. - - :ivar arm_resource_id: ARM ResourceId of a resource. - :vartype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId - """ - - _attribute_map = { - 'arm_resource_id': {'key': 'armResourceId', 'type': 'ArmResourceId'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword arm_resource_id: ARM ResourceId of a resource. - :paramtype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId - """ - super(UserCreatedAcrAccount, self).__init__(**kwargs) - self.arm_resource_id = kwargs.get('arm_resource_id', None) - - -class UserCreatedStorageAccount(msrest.serialization.Model): - """UserCreatedStorageAccount. - - :ivar arm_resource_id: ARM ResourceId of a resource. - :vartype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId - """ - - _attribute_map = { - 'arm_resource_id': {'key': 'armResourceId', 'type': 'ArmResourceId'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword arm_resource_id: ARM ResourceId of a resource. - :paramtype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId - """ - super(UserCreatedStorageAccount, self).__init__(**kwargs) - self.arm_resource_id = kwargs.get('arm_resource_id', None) - - -class UserIdentity(IdentityConfiguration): - """User identity configuration. - - All required parameters must be populated in order to send to Azure. - - :ivar identity_type: Required. [Required] Specifies the type of identity framework.Constant - filled by server. Possible values include: "Managed", "AMLToken", "UserIdentity". - :vartype identity_type: str or - ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType - """ - - _validation = { - 'identity_type': {'required': True}, - } - - _attribute_map = { - 'identity_type': {'key': 'identityType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(UserIdentity, self).__init__(**kwargs) - self.identity_type = 'UserIdentity' # type: str - - -class UsernamePasswordAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): - """UsernamePasswordAuthTypeWorkspaceConnectionProperties. - - All required parameters must be populated in order to send to Azure. - - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". - :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". 
- :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :ivar expiry_time: - :vartype expiry_time: ~datetime.datetime - :ivar metadata: Any object. - :vartype metadata: any - :ivar target: - :vartype target: str - :ivar credentials: - :vartype credentials: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUsernamePassword - """ - - _validation = { - 'auth_type': {'required': True}, - } - - _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionUsernamePassword'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", - "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", - "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". - :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory - :keyword expiry_time: - :paramtype expiry_time: ~datetime.datetime - :keyword metadata: Any object. - :paramtype metadata: any - :keyword target: - :paramtype target: str - :keyword credentials: - :paramtype credentials: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUsernamePassword - """ - super(UsernamePasswordAuthTypeWorkspaceConnectionProperties, self).__init__(**kwargs) - self.auth_type = 'UsernamePassword' # type: str - self.credentials = kwargs.get('credentials', None) - - -class VirtualMachineSchema(msrest.serialization.Model): - """VirtualMachineSchema. - - :ivar properties: - :vartype properties: ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties - """ - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'VirtualMachineSchemaProperties'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: - :paramtype properties: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties - """ - super(VirtualMachineSchema, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class VirtualMachine(Compute, VirtualMachineSchema): - """A Machine Learning compute based on Azure Virtual Machines. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar properties: - :vartype properties: ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - :ivar compute_location: Location for the underlying compute. - :vartype compute_location: str - :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". 
- :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar description: The description of the Machine Learning compute. - :vartype description: str - :ivar created_on: The time at which the compute was created. - :vartype created_on: ~datetime.datetime - :ivar modified_on: The time at which the compute was last modified. - :vartype modified_on: ~datetime.datetime - :ivar resource_id: ARM resource id of the underlying compute. - :vartype resource_id: str - :ivar provisioning_errors: Errors during provisioning. - :vartype provisioning_errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar is_attached_compute: Indicating whether the compute was provisioned by user and brought - from outside if true, or machine learning service provisioned it if false. - :vartype is_attached_compute: bool - :ivar disable_local_auth: Opt-out of local authentication and ensure customers can use only MSI - and AAD exclusively for authentication. - :vartype disable_local_auth: bool - """ - - _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, - } - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'VirtualMachineSchemaProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: - :paramtype properties: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties - :keyword compute_location: Location for the underlying compute. - :paramtype compute_location: str - :keyword description: The description of the Machine Learning compute. - :paramtype description: str - :keyword resource_id: ARM resource id of the underlying compute. - :paramtype resource_id: str - :keyword disable_local_auth: Opt-out of local authentication and ensure customers can use only - MSI and AAD exclusively for authentication. - :paramtype disable_local_auth: bool - """ - super(VirtualMachine, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - self.compute_type = 'VirtualMachine' # type: str - self.compute_location = kwargs.get('compute_location', None) - self.provisioning_state = None - self.description = kwargs.get('description', None) - self.created_on = None - self.modified_on = None - self.resource_id = kwargs.get('resource_id', None) - self.provisioning_errors = None - self.is_attached_compute = None - self.disable_local_auth = kwargs.get('disable_local_auth', None) - - -class VirtualMachineImage(msrest.serialization.Model): - """Virtual Machine image for Windows AML Compute. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Required. Virtual Machine image path. 
- :vartype id: str - """ - - _validation = { - 'id': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword id: Required. Virtual Machine image path. - :paramtype id: str - """ - super(VirtualMachineImage, self).__init__(**kwargs) - self.id = kwargs['id'] - - -class VirtualMachineSchemaProperties(msrest.serialization.Model): - """VirtualMachineSchemaProperties. - - :ivar virtual_machine_size: Virtual Machine size. - :vartype virtual_machine_size: str - :ivar ssh_port: Port open for ssh connections. - :vartype ssh_port: int - :ivar notebook_server_port: Notebook server port open for ssh connections. - :vartype notebook_server_port: int - :ivar address: Public IP address of the virtual machine. - :vartype address: str - :ivar administrator_account: Admin credentials for virtual machine. - :vartype administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - :ivar is_notebook_instance_compute: Indicates whether this compute will be used for running - notebooks. - :vartype is_notebook_instance_compute: bool - """ - - _attribute_map = { - 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'}, - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'notebook_server_port': {'key': 'notebookServerPort', 'type': 'int'}, - 'address': {'key': 'address', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, - 'is_notebook_instance_compute': {'key': 'isNotebookInstanceCompute', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword virtual_machine_size: Virtual Machine size. - :paramtype virtual_machine_size: str - :keyword ssh_port: Port open for ssh connections. - :paramtype ssh_port: int - :keyword notebook_server_port: Notebook server port open for ssh connections. - :paramtype notebook_server_port: int - :keyword address: Public IP address of the virtual machine. - :paramtype address: str - :keyword administrator_account: Admin credentials for virtual machine. - :paramtype administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - :keyword is_notebook_instance_compute: Indicates whether this compute will be used for running - notebooks. - :paramtype is_notebook_instance_compute: bool - """ - super(VirtualMachineSchemaProperties, self).__init__(**kwargs) - self.virtual_machine_size = kwargs.get('virtual_machine_size', None) - self.ssh_port = kwargs.get('ssh_port', None) - self.notebook_server_port = kwargs.get('notebook_server_port', None) - self.address = kwargs.get('address', None) - self.administrator_account = kwargs.get('administrator_account', None) - self.is_notebook_instance_compute = kwargs.get('is_notebook_instance_compute', None) - - -class VirtualMachineSecretsSchema(msrest.serialization.Model): - """VirtualMachineSecretsSchema. - - :ivar administrator_account: Admin credentials for virtual machine. - :vartype administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - """ - - _attribute_map = { - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword administrator_account: Admin credentials for virtual machine. 
- :paramtype administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - """ - super(VirtualMachineSecretsSchema, self).__init__(**kwargs) - self.administrator_account = kwargs.get('administrator_account', None) - - -class VirtualMachineSecrets(ComputeSecrets, VirtualMachineSecretsSchema): - """Secrets related to a Machine Learning compute based on AKS. - - All required parameters must be populated in order to send to Azure. - - :ivar administrator_account: Admin credentials for virtual machine. - :vartype administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". - :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType - """ - - _validation = { - 'compute_type': {'required': True}, - } - - _attribute_map = { - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword administrator_account: Admin credentials for virtual machine. - :paramtype administrator_account: - ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - """ - super(VirtualMachineSecrets, self).__init__(**kwargs) - self.administrator_account = kwargs.get('administrator_account', None) - self.compute_type = 'VirtualMachine' # type: str - - -class VirtualMachineSize(msrest.serialization.Model): - """Describes the properties of a VM size. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar name: The name of the virtual machine size. - :vartype name: str - :ivar family: The family name of the virtual machine size. - :vartype family: str - :ivar v_cp_us: The number of vCPUs supported by the virtual machine size. - :vartype v_cp_us: int - :ivar gpus: The number of gPUs supported by the virtual machine size. - :vartype gpus: int - :ivar os_vhd_size_mb: The OS VHD disk size, in MB, allowed by the virtual machine size. - :vartype os_vhd_size_mb: int - :ivar max_resource_volume_mb: The resource volume size, in MB, allowed by the virtual machine - size. - :vartype max_resource_volume_mb: int - :ivar memory_gb: The amount of memory, in GB, supported by the virtual machine size. - :vartype memory_gb: float - :ivar low_priority_capable: Specifies if the virtual machine size supports low priority VMs. - :vartype low_priority_capable: bool - :ivar premium_io: Specifies if the virtual machine size supports premium IO. - :vartype premium_io: bool - :ivar estimated_vm_prices: The estimated price information for using a VM. - :vartype estimated_vm_prices: ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices - :ivar supported_compute_types: Specifies the compute types supported by the virtual machine - size. 
- :vartype supported_compute_types: list[str] - """ - - _validation = { - 'name': {'readonly': True}, - 'family': {'readonly': True}, - 'v_cp_us': {'readonly': True}, - 'gpus': {'readonly': True}, - 'os_vhd_size_mb': {'readonly': True}, - 'max_resource_volume_mb': {'readonly': True}, - 'memory_gb': {'readonly': True}, - 'low_priority_capable': {'readonly': True}, - 'premium_io': {'readonly': True}, - } - - _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'family': {'key': 'family', 'type': 'str'}, - 'v_cp_us': {'key': 'vCPUs', 'type': 'int'}, - 'gpus': {'key': 'gpus', 'type': 'int'}, - 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'}, - 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'}, - 'memory_gb': {'key': 'memoryGB', 'type': 'float'}, - 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'}, - 'premium_io': {'key': 'premiumIO', 'type': 'bool'}, - 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVMPrices'}, - 'supported_compute_types': {'key': 'supportedComputeTypes', 'type': '[str]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword estimated_vm_prices: The estimated price information for using a VM. - :paramtype estimated_vm_prices: ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices - :keyword supported_compute_types: Specifies the compute types supported by the virtual machine - size. - :paramtype supported_compute_types: list[str] - """ - super(VirtualMachineSize, self).__init__(**kwargs) - self.name = None - self.family = None - self.v_cp_us = None - self.gpus = None - self.os_vhd_size_mb = None - self.max_resource_volume_mb = None - self.memory_gb = None - self.low_priority_capable = None - self.premium_io = None - self.estimated_vm_prices = kwargs.get('estimated_vm_prices', None) - self.supported_compute_types = kwargs.get('supported_compute_types', None) - - -class VirtualMachineSizeListResult(msrest.serialization.Model): - """The List Virtual Machine size operation response. - - :ivar value: The list of virtual machine sizes supported by AmlCompute. - :vartype value: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[VirtualMachineSize]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword value: The list of virtual machine sizes supported by AmlCompute. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] - """ - super(VirtualMachineSizeListResult, self).__init__(**kwargs) - self.value = kwargs.get('value', None) - - -class VirtualMachineSshCredentials(msrest.serialization.Model): - """Admin credentials for virtual machine. - - :ivar username: Username of admin account. - :vartype username: str - :ivar password: Password of admin account. - :vartype password: str - :ivar public_key_data: Public key data. - :vartype public_key_data: str - :ivar private_key_data: Private key data. - :vartype private_key_data: str - """ - - _attribute_map = { - 'username': {'key': 'username', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - 'public_key_data': {'key': 'publicKeyData', 'type': 'str'}, - 'private_key_data': {'key': 'privateKeyData', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword username: Username of admin account. - :paramtype username: str - :keyword password: Password of admin account. - :paramtype password: str - :keyword public_key_data: Public key data. 
- :paramtype public_key_data: str - :keyword private_key_data: Private key data. - :paramtype private_key_data: str - """ - super(VirtualMachineSshCredentials, self).__init__(**kwargs) - self.username = kwargs.get('username', None) - self.password = kwargs.get('password', None) - self.public_key_data = kwargs.get('public_key_data', None) - self.private_key_data = kwargs.get('private_key_data', None) - - -class VolumeDefinition(msrest.serialization.Model): - """VolumeDefinition. - - :ivar type: Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe. Possible - values include: "bind", "volume", "tmpfs", "npipe". Default value: "bind". - :vartype type: str or ~azure.mgmt.machinelearningservices.models.VolumeDefinitionType - :ivar read_only: Indicate whether to mount volume as readOnly. Default value for this is false. - :vartype read_only: bool - :ivar source: Source of the mount. For bind mounts this is the host path. - :vartype source: str - :ivar target: Target of the mount. For bind mounts this is the path in the container. - :vartype target: str - :ivar consistency: Consistency of the volume. - :vartype consistency: str - :ivar bind: Bind Options of the mount. - :vartype bind: ~azure.mgmt.machinelearningservices.models.BindOptions - :ivar volume: Volume Options of the mount. - :vartype volume: ~azure.mgmt.machinelearningservices.models.VolumeOptions - :ivar tmpfs: tmpfs option of the mount. - :vartype tmpfs: ~azure.mgmt.machinelearningservices.models.TmpfsOptions - """ - - _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'read_only': {'key': 'readOnly', 'type': 'bool'}, - 'source': {'key': 'source', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, - 'consistency': {'key': 'consistency', 'type': 'str'}, - 'bind': {'key': 'bind', 'type': 'BindOptions'}, - 'volume': {'key': 'volume', 'type': 'VolumeOptions'}, - 'tmpfs': {'key': 'tmpfs', 'type': 'TmpfsOptions'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword type: Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe. Possible - values include: "bind", "volume", "tmpfs", "npipe". Default value: "bind". - :paramtype type: str or ~azure.mgmt.machinelearningservices.models.VolumeDefinitionType - :keyword read_only: Indicate whether to mount volume as readOnly. Default value for this is - false. - :paramtype read_only: bool - :keyword source: Source of the mount. For bind mounts this is the host path. - :paramtype source: str - :keyword target: Target of the mount. For bind mounts this is the path in the container. - :paramtype target: str - :keyword consistency: Consistency of the volume. - :paramtype consistency: str - :keyword bind: Bind Options of the mount. - :paramtype bind: ~azure.mgmt.machinelearningservices.models.BindOptions - :keyword volume: Volume Options of the mount. - :paramtype volume: ~azure.mgmt.machinelearningservices.models.VolumeOptions - :keyword tmpfs: tmpfs option of the mount. - :paramtype tmpfs: ~azure.mgmt.machinelearningservices.models.TmpfsOptions - """ - super(VolumeDefinition, self).__init__(**kwargs) - self.type = kwargs.get('type', "bind") - self.read_only = kwargs.get('read_only', None) - self.source = kwargs.get('source', None) - self.target = kwargs.get('target', None) - self.consistency = kwargs.get('consistency', None) - self.bind = kwargs.get('bind', None) - self.volume = kwargs.get('volume', None) - self.tmpfs = kwargs.get('tmpfs', None) - - -class VolumeOptions(msrest.serialization.Model): - """VolumeOptions. 
- - :ivar nocopy: Indicate whether volume is nocopy. - :vartype nocopy: bool - """ - - _attribute_map = { - 'nocopy': {'key': 'nocopy', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword nocopy: Indicate whether volume is nocopy. - :paramtype nocopy: bool - """ - super(VolumeOptions, self).__init__(**kwargs) - self.nocopy = kwargs.get('nocopy', None) - - -class Workspace(Resource): - """An object that represents a machine learning workspace. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. - :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar kind: - :vartype kind: str - :ivar location: - :vartype location: str - :ivar sku: Optional. This field is required to be implemented by the RP because AML is - supporting more than one tier. - :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar tags: A set of tags. Dictionary of :code:``. - :vartype tags: dict[str, str] - :ivar allow_public_access_when_behind_vnet: The flag to indicate whether to allow public access - when behind VNet. - :vartype allow_public_access_when_behind_vnet: bool - :ivar application_insights: ARM id of the application insights associated with this workspace. - :vartype application_insights: str - :ivar associated_workspaces: - :vartype associated_workspaces: list[str] - :ivar container_registries: - :vartype container_registries: list[str] - :ivar container_registry: ARM id of the container registry associated with this workspace. - :vartype container_registry: str - :ivar description: The description of this workspace. - :vartype description: str - :ivar discovery_url: Url for the discovery service to identify regional endpoints for machine - learning experimentation services. - :vartype discovery_url: str - :ivar enable_data_isolation: - :vartype enable_data_isolation: bool - :ivar encryption: - :vartype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionProperty - :ivar existing_workspaces: - :vartype existing_workspaces: list[str] - :ivar feature_store_settings: Settings for feature store type workspace. - :vartype feature_store_settings: - ~azure.mgmt.machinelearningservices.models.FeatureStoreSettings - :ivar friendly_name: The friendly name for this workspace. This name in mutable. - :vartype friendly_name: str - :ivar hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data - collected by the service. - :vartype hbi_workspace: bool - :ivar hub_resource_id: - :vartype hub_resource_id: str - :ivar image_build_compute: The compute name for image build. - :vartype image_build_compute: str - :ivar key_vault: ARM id of the key vault associated with this workspace. This cannot be changed - once the workspace has been created. 
- :vartype key_vault: str - :ivar key_vaults: - :vartype key_vaults: list[str] - :ivar managed_network: Managed Network settings for a machine learning workspace. - :vartype managed_network: ~azure.mgmt.machinelearningservices.models.ManagedNetworkSettings - :ivar ml_flow_tracking_uri: The URI associated with this workspace that machine learning flow - must point at to set up tracking. - :vartype ml_flow_tracking_uri: str - :ivar notebook_info: The notebook info of Azure ML workspace. - :vartype notebook_info: ~azure.mgmt.machinelearningservices.models.NotebookResourceInfo - :ivar primary_user_assigned_identity: The user assigned identity resource id that represents - the workspace identity. - :vartype primary_user_assigned_identity: str - :ivar private_endpoint_connections: The list of private endpoint connections in the workspace. - :vartype private_endpoint_connections: - list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] - :ivar private_link_count: Count of private connections in the workspace. - :vartype private_link_count: int - :ivar provisioning_state: The current deployment state of workspace resource. The - provisioningState is to indicate states for resource provisioning. Possible values include: - "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled". - :vartype provisioning_state: str or - ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar public_network_access: Whether requests from Public Network are allowed. Possible values - include: "Enabled", "Disabled". - :vartype public_network_access: str or - ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType - :ivar service_managed_resources_settings: The service managed resource settings. - :vartype service_managed_resources_settings: - ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings - :ivar service_provisioned_resource_group: The name of the managed resource group created by - workspace RP in customer subscription if the workspace is CMK workspace. - :vartype service_provisioned_resource_group: str - :ivar shared_private_link_resources: The list of shared private link resources in this - workspace. - :vartype shared_private_link_resources: - list[~azure.mgmt.machinelearningservices.models.SharedPrivateLinkResource] - :ivar soft_delete_retention_in_days: Retention time in days after workspace get soft deleted. - :vartype soft_delete_retention_in_days: int - :ivar storage_account: ARM id of the storage account associated with this workspace. This - cannot be changed once the workspace has been created. - :vartype storage_account: str - :ivar storage_accounts: - :vartype storage_accounts: list[str] - :ivar storage_hns_enabled: If the storage associated with the workspace has hierarchical - namespace(HNS) enabled. - :vartype storage_hns_enabled: bool - :ivar system_datastores_auth_mode: The auth mode used for accessing the system datastores of - the workspace. - :vartype system_datastores_auth_mode: str - :ivar tenant_id: The tenant id associated with this workspace. - :vartype tenant_id: str - :ivar v1_legacy_mode: Enabling v1_legacy_mode may prevent you from using features provided by - the v2 API. - :vartype v1_legacy_mode: bool - :ivar workspace_hub_config: WorkspaceHub's configuration object. - :vartype workspace_hub_config: ~azure.mgmt.machinelearningservices.models.WorkspaceHubConfig - :ivar workspace_id: The immutable id associated with this workspace. 
- :vartype workspace_id: str - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'ml_flow_tracking_uri': {'readonly': True}, - 'notebook_info': {'readonly': True}, - 'private_endpoint_connections': {'readonly': True}, - 'private_link_count': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'service_provisioned_resource_group': {'readonly': True}, - 'storage_hns_enabled': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'workspace_id': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'}, - 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'}, - 'associated_workspaces': {'key': 'properties.associatedWorkspaces', 'type': '[str]'}, - 'container_registries': {'key': 'properties.containerRegistries', 'type': '[str]'}, - 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'}, - 'enable_data_isolation': {'key': 'properties.enableDataIsolation', 'type': 'bool'}, - 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionProperty'}, - 'existing_workspaces': {'key': 'properties.existingWorkspaces', 'type': '[str]'}, - 'feature_store_settings': {'key': 'properties.featureStoreSettings', 'type': 'FeatureStoreSettings'}, - 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, - 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'}, - 'hub_resource_id': {'key': 'properties.hubResourceId', 'type': 'str'}, - 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, - 'key_vault': {'key': 'properties.keyVault', 'type': 'str'}, - 'key_vaults': {'key': 'properties.keyVaults', 'type': '[str]'}, - 'managed_network': {'key': 'properties.managedNetwork', 'type': 'ManagedNetworkSettings'}, - 'ml_flow_tracking_uri': {'key': 'properties.mlFlowTrackingUri', 'type': 'str'}, - 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'}, - 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'}, - 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, - 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, - 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'}, - 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'}, - 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'}, - 'soft_delete_retention_in_days': {'key': 
'properties.softDeleteRetentionInDays', 'type': 'int'}, - 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'}, - 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[str]'}, - 'storage_hns_enabled': {'key': 'properties.storageHnsEnabled', 'type': 'bool'}, - 'system_datastores_auth_mode': {'key': 'properties.systemDatastoresAuthMode', 'type': 'str'}, - 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'}, - 'v1_legacy_mode': {'key': 'properties.v1LegacyMode', 'type': 'bool'}, - 'workspace_hub_config': {'key': 'properties.workspaceHubConfig', 'type': 'WorkspaceHubConfig'}, - 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword kind: - :paramtype kind: str - :keyword location: - :paramtype location: str - :keyword sku: Optional. This field is required to be implemented by the RP because AML is - supporting more than one tier. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - :keyword tags: A set of tags. Dictionary of :code:``. - :paramtype tags: dict[str, str] - :keyword allow_public_access_when_behind_vnet: The flag to indicate whether to allow public - access when behind VNet. - :paramtype allow_public_access_when_behind_vnet: bool - :keyword application_insights: ARM id of the application insights associated with this - workspace. - :paramtype application_insights: str - :keyword associated_workspaces: - :paramtype associated_workspaces: list[str] - :keyword container_registries: - :paramtype container_registries: list[str] - :keyword container_registry: ARM id of the container registry associated with this workspace. - :paramtype container_registry: str - :keyword description: The description of this workspace. - :paramtype description: str - :keyword discovery_url: Url for the discovery service to identify regional endpoints for - machine learning experimentation services. - :paramtype discovery_url: str - :keyword enable_data_isolation: - :paramtype enable_data_isolation: bool - :keyword encryption: - :paramtype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionProperty - :keyword existing_workspaces: - :paramtype existing_workspaces: list[str] - :keyword feature_store_settings: Settings for feature store type workspace. - :paramtype feature_store_settings: - ~azure.mgmt.machinelearningservices.models.FeatureStoreSettings - :keyword friendly_name: The friendly name for this workspace. This name in mutable. - :paramtype friendly_name: str - :keyword hbi_workspace: The flag to signal HBI data in the workspace and reduce diagnostic data - collected by the service. - :paramtype hbi_workspace: bool - :keyword hub_resource_id: - :paramtype hub_resource_id: str - :keyword image_build_compute: The compute name for image build. - :paramtype image_build_compute: str - :keyword key_vault: ARM id of the key vault associated with this workspace. This cannot be - changed once the workspace has been created. - :paramtype key_vault: str - :keyword key_vaults: - :paramtype key_vaults: list[str] - :keyword managed_network: Managed Network settings for a machine learning workspace. 
- :paramtype managed_network: ~azure.mgmt.machinelearningservices.models.ManagedNetworkSettings - :keyword primary_user_assigned_identity: The user assigned identity resource id that represents - the workspace identity. - :paramtype primary_user_assigned_identity: str - :keyword public_network_access: Whether requests from Public Network are allowed. Possible - values include: "Enabled", "Disabled". - :paramtype public_network_access: str or - ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType - :keyword service_managed_resources_settings: The service managed resource settings. - :paramtype service_managed_resources_settings: - ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings - :keyword shared_private_link_resources: The list of shared private link resources in this - workspace. - :paramtype shared_private_link_resources: - list[~azure.mgmt.machinelearningservices.models.SharedPrivateLinkResource] - :keyword soft_delete_retention_in_days: Retention time in days after workspace get soft - deleted. - :paramtype soft_delete_retention_in_days: int - :keyword storage_account: ARM id of the storage account associated with this workspace. This - cannot be changed once the workspace has been created. - :paramtype storage_account: str - :keyword storage_accounts: - :paramtype storage_accounts: list[str] - :keyword system_datastores_auth_mode: The auth mode used for accessing the system datastores of - the workspace. - :paramtype system_datastores_auth_mode: str - :keyword v1_legacy_mode: Enabling v1_legacy_mode may prevent you from using features provided - by the v2 API. - :paramtype v1_legacy_mode: bool - :keyword workspace_hub_config: WorkspaceHub's configuration object. - :paramtype workspace_hub_config: ~azure.mgmt.machinelearningservices.models.WorkspaceHubConfig - """ - super(Workspace, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.kind = kwargs.get('kind', None) - self.location = kwargs.get('location', None) - self.sku = kwargs.get('sku', None) - self.tags = kwargs.get('tags', None) - self.allow_public_access_when_behind_vnet = kwargs.get('allow_public_access_when_behind_vnet', None) - self.application_insights = kwargs.get('application_insights', None) - self.associated_workspaces = kwargs.get('associated_workspaces', None) - self.container_registries = kwargs.get('container_registries', None) - self.container_registry = kwargs.get('container_registry', None) - self.description = kwargs.get('description', None) - self.discovery_url = kwargs.get('discovery_url', None) - self.enable_data_isolation = kwargs.get('enable_data_isolation', None) - self.encryption = kwargs.get('encryption', None) - self.existing_workspaces = kwargs.get('existing_workspaces', None) - self.feature_store_settings = kwargs.get('feature_store_settings', None) - self.friendly_name = kwargs.get('friendly_name', None) - self.hbi_workspace = kwargs.get('hbi_workspace', None) - self.hub_resource_id = kwargs.get('hub_resource_id', None) - self.image_build_compute = kwargs.get('image_build_compute', None) - self.key_vault = kwargs.get('key_vault', None) - self.key_vaults = kwargs.get('key_vaults', None) - self.managed_network = kwargs.get('managed_network', None) - self.ml_flow_tracking_uri = None - self.notebook_info = None - self.primary_user_assigned_identity = kwargs.get('primary_user_assigned_identity', None) - self.private_endpoint_connections = None - self.private_link_count = None - self.provisioning_state = None - self.public_network_access = 
kwargs.get('public_network_access', None) - self.service_managed_resources_settings = kwargs.get('service_managed_resources_settings', None) - self.service_provisioned_resource_group = None - self.shared_private_link_resources = kwargs.get('shared_private_link_resources', None) - self.soft_delete_retention_in_days = kwargs.get('soft_delete_retention_in_days', None) - self.storage_account = kwargs.get('storage_account', None) - self.storage_accounts = kwargs.get('storage_accounts', None) - self.storage_hns_enabled = None - self.system_datastores_auth_mode = kwargs.get('system_datastores_auth_mode', None) - self.tenant_id = None - self.v1_legacy_mode = kwargs.get('v1_legacy_mode', None) - self.workspace_hub_config = kwargs.get('workspace_hub_config', None) - self.workspace_id = None - - -class WorkspaceConnectionAccessKey(msrest.serialization.Model): - """WorkspaceConnectionAccessKey. - - :ivar access_key_id: - :vartype access_key_id: str - :ivar secret_access_key: - :vartype secret_access_key: str - """ - - _attribute_map = { - 'access_key_id': {'key': 'accessKeyId', 'type': 'str'}, - 'secret_access_key': {'key': 'secretAccessKey', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword access_key_id: - :paramtype access_key_id: str - :keyword secret_access_key: - :paramtype secret_access_key: str - """ - super(WorkspaceConnectionAccessKey, self).__init__(**kwargs) - self.access_key_id = kwargs.get('access_key_id', None) - self.secret_access_key = kwargs.get('secret_access_key', None) - - -class WorkspaceConnectionApiKey(msrest.serialization.Model): - """Api key object for workspace connection credential. - - :ivar key: - :vartype key: str - """ - - _attribute_map = { - 'key': {'key': 'key', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword key: - :paramtype key: str - """ - super(WorkspaceConnectionApiKey, self).__init__(**kwargs) - self.key = kwargs.get('key', None) - - -class WorkspaceConnectionManagedIdentity(msrest.serialization.Model): - """WorkspaceConnectionManagedIdentity. - - :ivar client_id: - :vartype client_id: str - :ivar resource_id: - :vartype resource_id: str - """ - - _attribute_map = { - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword client_id: - :paramtype client_id: str - :keyword resource_id: - :paramtype resource_id: str - """ - super(WorkspaceConnectionManagedIdentity, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.resource_id = kwargs.get('resource_id', None) - - -class WorkspaceConnectionPersonalAccessToken(msrest.serialization.Model): - """WorkspaceConnectionPersonalAccessToken. - - :ivar pat: - :vartype pat: str - """ - - _attribute_map = { - 'pat': {'key': 'pat', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword pat: - :paramtype pat: str - """ - super(WorkspaceConnectionPersonalAccessToken, self).__init__(**kwargs) - self.pat = kwargs.get('pat', None) - - -class WorkspaceConnectionPropertiesV2BasicResource(Resource): - """WorkspaceConnectionPropertiesV2BasicResource. - - Variables are only populated by the server, and will be ignored when sending a request. - - All required parameters must be populated in order to send to Azure. - - :ivar id: Fully qualified resource ID for the resource. Ex - - /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}. 
- :vartype id: str - :ivar name: The name of the resource. - :vartype name: str - :ivar type: The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or - "Microsoft.Storage/storageAccounts". - :vartype type: str - :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy - information. - :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. - :vartype properties: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2 - """ - - _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'WorkspaceConnectionPropertiesV2'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: Required. - :paramtype properties: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2 - """ - super(WorkspaceConnectionPropertiesV2BasicResource, self).__init__(**kwargs) - self.properties = kwargs['properties'] - - -class WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult(msrest.serialization.Model): - """WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult. - - :ivar next_link: - :vartype next_link: str - :ivar value: - :vartype value: - list[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[WorkspaceConnectionPropertiesV2BasicResource]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: - :paramtype next_link: str - :keyword value: - :paramtype value: - list[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] - """ - super(WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class WorkspaceConnectionServicePrincipal(msrest.serialization.Model): - """WorkspaceConnectionServicePrincipal. - - :ivar client_id: - :vartype client_id: str - :ivar client_secret: - :vartype client_secret: str - :ivar tenant_id: - :vartype tenant_id: str - """ - - _attribute_map = { - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'client_secret': {'key': 'clientSecret', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword client_id: - :paramtype client_id: str - :keyword client_secret: - :paramtype client_secret: str - :keyword tenant_id: - :paramtype tenant_id: str - """ - super(WorkspaceConnectionServicePrincipal, self).__init__(**kwargs) - self.client_id = kwargs.get('client_id', None) - self.client_secret = kwargs.get('client_secret', None) - self.tenant_id = kwargs.get('tenant_id', None) - - -class WorkspaceConnectionSharedAccessSignature(msrest.serialization.Model): - """WorkspaceConnectionSharedAccessSignature. 
- - :ivar sas: - :vartype sas: str - """ - - _attribute_map = { - 'sas': {'key': 'sas', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword sas: - :paramtype sas: str - """ - super(WorkspaceConnectionSharedAccessSignature, self).__init__(**kwargs) - self.sas = kwargs.get('sas', None) - - -class WorkspaceConnectionUpdateParameter(msrest.serialization.Model): - """The properties that the machine learning workspace connection will be updated with. - - :ivar properties: The properties that the machine learning workspace connection will be updated - with. - :vartype properties: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2 - """ - - _attribute_map = { - 'properties': {'key': 'properties', 'type': 'WorkspaceConnectionPropertiesV2'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword properties: The properties that the machine learning workspace connection will be - updated with. - :paramtype properties: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2 - """ - super(WorkspaceConnectionUpdateParameter, self).__init__(**kwargs) - self.properties = kwargs.get('properties', None) - - -class WorkspaceConnectionUsernamePassword(msrest.serialization.Model): - """WorkspaceConnectionUsernamePassword. - - :ivar password: - :vartype password: str - :ivar username: - :vartype username: str - """ - - _attribute_map = { - 'password': {'key': 'password', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword password: - :paramtype password: str - :keyword username: - :paramtype username: str - """ - super(WorkspaceConnectionUsernamePassword, self).__init__(**kwargs) - self.password = kwargs.get('password', None) - self.username = kwargs.get('username', None) - - -class WorkspaceHubConfig(msrest.serialization.Model): - """WorkspaceHub's configuration object. - - :ivar additional_workspace_storage_accounts: - :vartype additional_workspace_storage_accounts: list[str] - :ivar default_workspace_resource_group: - :vartype default_workspace_resource_group: str - """ - - _attribute_map = { - 'additional_workspace_storage_accounts': {'key': 'additionalWorkspaceStorageAccounts', 'type': '[str]'}, - 'default_workspace_resource_group': {'key': 'defaultWorkspaceResourceGroup', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword additional_workspace_storage_accounts: - :paramtype additional_workspace_storage_accounts: list[str] - :keyword default_workspace_resource_group: - :paramtype default_workspace_resource_group: str - """ - super(WorkspaceHubConfig, self).__init__(**kwargs) - self.additional_workspace_storage_accounts = kwargs.get('additional_workspace_storage_accounts', None) - self.default_workspace_resource_group = kwargs.get('default_workspace_resource_group', None) - - -class WorkspaceListResult(msrest.serialization.Model): - """The result of a request to list machine learning workspaces. - - :ivar next_link: The link to the next page constructed using the continuationToken. If null, - there are no additional pages. - :vartype next_link: str - :ivar value: The list of machine learning workspaces. Since this list may be incomplete, the - nextLink field should be used to request the next list of machine learning workspaces. 
- :vartype value: list[~azure.mgmt.machinelearningservices.models.Workspace] - """ - - _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Workspace]'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword next_link: The link to the next page constructed using the continuationToken. If - null, there are no additional pages. - :paramtype next_link: str - :keyword value: The list of machine learning workspaces. Since this list may be incomplete, the - nextLink field should be used to request the next list of machine learning workspaces. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.Workspace] - """ - super(WorkspaceListResult, self).__init__(**kwargs) - self.next_link = kwargs.get('next_link', None) - self.value = kwargs.get('value', None) - - -class WorkspacePrivateEndpointResource(msrest.serialization.Model): - """The Private Endpoint resource. - - Variables are only populated by the server, and will be ignored when sending a request. - - :ivar id: e.g. - /subscriptions/{networkSubscriptionId}/resourceGroups/{rgName}/providers/Microsoft.Network/privateEndpoints/{privateEndpointName}. - :vartype id: str - :ivar subnet_arm_id: The subnetId that the private endpoint is connected to. - :vartype subnet_arm_id: str - """ - - _validation = { - 'id': {'readonly': True}, - 'subnet_arm_id': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'}, - } - - def __init__( - self, - **kwargs - ): - """ - """ - super(WorkspacePrivateEndpointResource, self).__init__(**kwargs) - self.id = None - self.subnet_arm_id = None - - -class WorkspaceUpdateParameters(msrest.serialization.Model): - """The parameters for updating a machine learning workspace. - - :ivar identity: Managed service identity (system assigned and/or user assigned identities). - :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :ivar sku: Optional. This field is required to be implemented by the RP because AML is - supporting more than one tier. - :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar tags: A set of tags. The resource tags for the machine learning workspace. - :vartype tags: dict[str, str] - :ivar application_insights: ARM id of the application insights associated with this workspace. - :vartype application_insights: str - :ivar container_registry: ARM id of the container registry associated with this workspace. - :vartype container_registry: str - :ivar description: The description of this workspace. - :vartype description: str - :ivar enable_data_isolation: - :vartype enable_data_isolation: bool - :ivar encryption: - :vartype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionUpdateProperties - :ivar feature_store_settings: Settings for feature store type workspace. - :vartype feature_store_settings: - ~azure.mgmt.machinelearningservices.models.FeatureStoreSettings - :ivar friendly_name: The friendly name for this workspace. This name in mutable. - :vartype friendly_name: str - :ivar image_build_compute: The compute name for image build. - :vartype image_build_compute: str - :ivar managed_network: Managed Network settings for a machine learning workspace. - :vartype managed_network: ~azure.mgmt.machinelearningservices.models.ManagedNetworkSettings - :ivar primary_user_assigned_identity: The user assigned identity resource id that represents - the workspace identity. 
- :vartype primary_user_assigned_identity: str - :ivar public_network_access: Whether requests from Public Network are allowed. Possible values - include: "Enabled", "Disabled". - :vartype public_network_access: str or - ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType - :ivar service_managed_resources_settings: The service managed resource settings. - :vartype service_managed_resources_settings: - ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings - :ivar soft_delete_retention_in_days: Retention time in days after workspace get soft deleted. - :vartype soft_delete_retention_in_days: int - :ivar v1_legacy_mode: Enabling v1_legacy_mode may prevent you from using features provided by - the v2 API. - :vartype v1_legacy_mode: bool - """ - - _attribute_map = { - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'}, - 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'enable_data_isolation': {'key': 'properties.enableDataIsolation', 'type': 'bool'}, - 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionUpdateProperties'}, - 'feature_store_settings': {'key': 'properties.featureStoreSettings', 'type': 'FeatureStoreSettings'}, - 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, - 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, - 'managed_network': {'key': 'properties.managedNetwork', 'type': 'ManagedNetworkSettings'}, - 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'}, - 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, - 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'}, - 'soft_delete_retention_in_days': {'key': 'properties.softDeleteRetentionInDays', 'type': 'int'}, - 'v1_legacy_mode': {'key': 'properties.v1LegacyMode', 'type': 'bool'}, - } - - def __init__( - self, - **kwargs - ): - """ - :keyword identity: Managed service identity (system assigned and/or user assigned identities). - :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity - :keyword sku: Optional. This field is required to be implemented by the RP because AML is - supporting more than one tier. - :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - :keyword tags: A set of tags. The resource tags for the machine learning workspace. - :paramtype tags: dict[str, str] - :keyword application_insights: ARM id of the application insights associated with this - workspace. - :paramtype application_insights: str - :keyword container_registry: ARM id of the container registry associated with this workspace. - :paramtype container_registry: str - :keyword description: The description of this workspace. - :paramtype description: str - :keyword enable_data_isolation: - :paramtype enable_data_isolation: bool - :keyword encryption: - :paramtype encryption: ~azure.mgmt.machinelearningservices.models.EncryptionUpdateProperties - :keyword feature_store_settings: Settings for feature store type workspace. 
- :paramtype feature_store_settings: - ~azure.mgmt.machinelearningservices.models.FeatureStoreSettings - :keyword friendly_name: The friendly name for this workspace. This name in mutable. - :paramtype friendly_name: str - :keyword image_build_compute: The compute name for image build. - :paramtype image_build_compute: str - :keyword managed_network: Managed Network settings for a machine learning workspace. - :paramtype managed_network: ~azure.mgmt.machinelearningservices.models.ManagedNetworkSettings - :keyword primary_user_assigned_identity: The user assigned identity resource id that represents - the workspace identity. - :paramtype primary_user_assigned_identity: str - :keyword public_network_access: Whether requests from Public Network are allowed. Possible - values include: "Enabled", "Disabled". - :paramtype public_network_access: str or - ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType - :keyword service_managed_resources_settings: The service managed resource settings. - :paramtype service_managed_resources_settings: - ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings - :keyword soft_delete_retention_in_days: Retention time in days after workspace get soft - deleted. - :paramtype soft_delete_retention_in_days: int - :keyword v1_legacy_mode: Enabling v1_legacy_mode may prevent you from using features provided - by the v2 API. - :paramtype v1_legacy_mode: bool - """ - super(WorkspaceUpdateParameters, self).__init__(**kwargs) - self.identity = kwargs.get('identity', None) - self.sku = kwargs.get('sku', None) - self.tags = kwargs.get('tags', None) - self.application_insights = kwargs.get('application_insights', None) - self.container_registry = kwargs.get('container_registry', None) - self.description = kwargs.get('description', None) - self.enable_data_isolation = kwargs.get('enable_data_isolation', None) - self.encryption = kwargs.get('encryption', None) - self.feature_store_settings = kwargs.get('feature_store_settings', None) - self.friendly_name = kwargs.get('friendly_name', None) - self.image_build_compute = kwargs.get('image_build_compute', None) - self.managed_network = kwargs.get('managed_network', None) - self.primary_user_assigned_identity = kwargs.get('primary_user_assigned_identity', None) - self.public_network_access = kwargs.get('public_network_access', None) - self.service_managed_resources_settings = kwargs.get('service_managed_resources_settings', None) - self.soft_delete_retention_in_days = kwargs.get('soft_delete_retention_in_days', None) - self.v1_legacy_mode = kwargs.get('v1_legacy_mode', None) diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_models_py3.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_models_py3.py index 7b2d35a966f1..06bb4c788989 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_models_py3.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_models_py3.py @@ -1,4 +1,5 @@ # coding=utf-8 +# pylint: disable=too-many-lines # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. 
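The hunks that follow swap the py3 models from msrest over to the vendored _serialization module and introduce a JSON alias for untyped metadata. A minimal usage sketch, assuming the vendored Model keeps msrest's serialize()/deserialize() surface and that the regenerated WorkspaceUpdateParameters keeps the keyword arguments shown in the deleted _models.py above (the import path is the internal _restclient package from this diff):

from azure.ai.ml._restclient.v2023_08_01_preview import models

params = models.WorkspaceUpdateParameters(
    description="updated via PATCH",
    tags={"env": "dev"},
    public_network_access="Enabled",
)

# Flattened "properties.*" keys in _attribute_map mean these top-level kwargs
# serialize under a nested "properties" object with camelCase wire names.
wire = params.serialize()
assert wire["properties"]["publicNetworkAccess"] == "Enabled"

# deserialize() reverses the flattening when reading a REST payload back.
roundtrip = models.WorkspaceUpdateParameters.deserialize(wire)
assert roundtrip.description == "updated via PATCH"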
@@ -7,81 +8,120 @@ # -------------------------------------------------------------------------- import datetime -from typing import Any, Dict, List, Optional, Union +import sys +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union -from azure.core.exceptions import HttpResponseError -import msrest.serialization +from .. import _serialization -from ._azure_machine_learning_workspaces_enums import * +if sys.version_info >= (3, 9): + from collections.abc import MutableMapping +else: + from typing import MutableMapping # type: ignore # pylint: disable=ungrouped-imports +if TYPE_CHECKING: + # pylint: disable=unused-import,ungrouped-imports + from .. import models as _models +JSON = MutableMapping[str, Any] # pylint: disable=unsubscriptable-object -class WorkspaceConnectionPropertiesV2(msrest.serialization.Model): + +class WorkspaceConnectionPropertiesV2(_serialization.Model): """WorkspaceConnectionPropertiesV2. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AccessKeyAuthTypeWorkspaceConnectionProperties, ApiKeyAuthWorkspaceConnectionProperties, CustomKeysWorkspaceConnectionProperties, ManagedIdentityAuthTypeWorkspaceConnectionProperties, NoneAuthTypeWorkspaceConnectionProperties, PATAuthTypeWorkspaceConnectionProperties, SASAuthTypeWorkspaceConnectionProperties, ServicePrincipalAuthTypeWorkspaceConnectionProperties, UsernamePasswordAuthTypeWorkspaceConnectionProperties. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AccessKeyAuthTypeWorkspaceConnectionProperties, ApiKeyAuthWorkspaceConnectionProperties, + CustomKeysWorkspaceConnectionProperties, ManagedIdentityAuthTypeWorkspaceConnectionProperties, + NoneAuthTypeWorkspaceConnectionProperties, PATAuthTypeWorkspaceConnectionProperties, + SASAuthTypeWorkspaceConnectionProperties, + ServicePrincipalAuthTypeWorkspaceConnectionProperties, + UsernamePasswordAuthTypeWorkspaceConnectionProperties + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", + :ivar category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar created_by_workspace_arm_id: The arm id of the workspace which created this connection. + :vartype created_by_workspace_arm_id: str :ivar expiry_time: :vartype expiry_time: ~datetime.datetime + :ivar is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. 
+ :vartype is_shared_to_all: bool :ivar metadata: Any object. - :vartype metadata: any + :vartype metadata: JSON :ivar target: :vartype target: str """ _validation = { - 'auth_type': {'required': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, } _subtype_map = { - 'auth_type': {'AccessKey': 'AccessKeyAuthTypeWorkspaceConnectionProperties', 'ApiKey': 'ApiKeyAuthWorkspaceConnectionProperties', 'CustomKeys': 'CustomKeysWorkspaceConnectionProperties', 'ManagedIdentity': 'ManagedIdentityAuthTypeWorkspaceConnectionProperties', 'None': 'NoneAuthTypeWorkspaceConnectionProperties', 'PAT': 'PATAuthTypeWorkspaceConnectionProperties', 'SAS': 'SASAuthTypeWorkspaceConnectionProperties', 'ServicePrincipal': 'ServicePrincipalAuthTypeWorkspaceConnectionProperties', 'UsernamePassword': 'UsernamePasswordAuthTypeWorkspaceConnectionProperties'} + "auth_type": { + "AccessKey": "AccessKeyAuthTypeWorkspaceConnectionProperties", + "ApiKey": "ApiKeyAuthWorkspaceConnectionProperties", + "CustomKeys": "CustomKeysWorkspaceConnectionProperties", + "ManagedIdentity": "ManagedIdentityAuthTypeWorkspaceConnectionProperties", + "None": "NoneAuthTypeWorkspaceConnectionProperties", + "PAT": "PATAuthTypeWorkspaceConnectionProperties", + "SAS": "SASAuthTypeWorkspaceConnectionProperties", + "ServicePrincipal": "ServicePrincipalAuthTypeWorkspaceConnectionProperties", + "UsernamePassword": "UsernamePasswordAuthTypeWorkspaceConnectionProperties", + } } def __init__( self, *, - category: Optional[Union[str, "ConnectionCategory"]] = None, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, expiry_time: Optional[datetime.datetime] = None, - metadata: Optional[Any] = None, + is_shared_to_all: Optional[bool] = None, + metadata: Optional[JSON] = None, target: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", + :keyword category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory :keyword expiry_time: :paramtype expiry_time: ~datetime.datetime + :keyword is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :paramtype is_shared_to_all: bool :keyword metadata: Any object. 
- :paramtype metadata: any + :paramtype metadata: JSON :keyword target: :paramtype target: str """ - super(WorkspaceConnectionPropertiesV2, self).__init__(**kwargs) - self.auth_type = None # type: Optional[str] + super().__init__(**kwargs) + self.auth_type: Optional[str] = None self.category = category + self.created_by_workspace_arm_id = None self.expiry_time = expiry_time + self.is_shared_to_all = is_shared_to_all self.metadata = metadata self.target = target @@ -89,21 +129,28 @@ def __init__( class AccessKeyAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """AccessKeyAuthTypeWorkspaceConnectionProperties. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", + :ivar category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar created_by_workspace_arm_id: The arm id of the workspace which created this connection. + :vartype created_by_workspace_arm_id: str :ivar expiry_time: :vartype expiry_time: ~datetime.datetime + :ivar is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :vartype is_shared_to_all: bool :ivar metadata: Any object. 
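# The authType discriminator on WorkspaceConnectionPropertiesV2 follows the same
# _subtype_map pattern used throughout this file: each subclass pins its constant
# in __init__. The regenerated base also adds isSharedToAll as a plain keyword
# and createdByWorkspaceArmId as a readonly, server-populated field. A hedged
# sketch, using the internal _restclient models package from this diff:
from azure.ai.ml._restclient.v2023_08_01_preview import models

props = models.AccessKeyAuthTypeWorkspaceConnectionProperties(
    target="https://example.contoso.com",   # hypothetical connection target
    is_shared_to_all=True,
)
assert props.auth_type == "AccessKey"               # set by the subclass itself
assert props.created_by_workspace_arm_id is None    # only populated in responses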
- :vartype metadata: any + :vartype metadata: JSON :ivar target: :vartype target: str :ivar credentials: @@ -111,82 +158,102 @@ class AccessKeyAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionProperti """ _validation = { - 'auth_type': {'required': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionAccessKey'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionAccessKey"}, } def __init__( self, *, - category: Optional[Union[str, "ConnectionCategory"]] = None, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, expiry_time: Optional[datetime.datetime] = None, - metadata: Optional[Any] = None, + is_shared_to_all: Optional[bool] = None, + metadata: Optional[JSON] = None, target: Optional[str] = None, - credentials: Optional["WorkspaceConnectionAccessKey"] = None, - **kwargs - ): + credentials: Optional["_models.WorkspaceConnectionAccessKey"] = None, + **kwargs: Any + ) -> None: """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", + :keyword category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory :keyword expiry_time: :paramtype expiry_time: ~datetime.datetime + :keyword is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :paramtype is_shared_to_all: bool :keyword metadata: Any object. - :paramtype metadata: any + :paramtype metadata: JSON :keyword target: :paramtype target: str :keyword credentials: :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionAccessKey """ - super(AccessKeyAuthTypeWorkspaceConnectionProperties, self).__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) - self.auth_type = 'AccessKey' # type: str + super().__init__( + category=category, + expiry_time=expiry_time, + is_shared_to_all=is_shared_to_all, + metadata=metadata, + target=target, + **kwargs + ) + self.auth_type: str = "AccessKey" self.credentials = credentials -class DatastoreCredentials(msrest.serialization.Model): +class DatastoreCredentials(_serialization.Model): """Base definition for datastore credentials. - You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: AccountKeyDatastoreCredentials, CertificateDatastoreCredentials, KerberosKeytabCredentials, KerberosPasswordCredentials, NoneDatastoreCredentials, SasDatastoreCredentials, ServicePrincipalDatastoreCredentials. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AccountKeyDatastoreCredentials, CertificateDatastoreCredentials, KerberosKeytabCredentials, + KerberosPasswordCredentials, NoneDatastoreCredentials, SasDatastoreCredentials, + ServicePrincipalDatastoreCredentials All required parameters must be populated in order to send to Azure. - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType """ _validation = { - 'credentials_type': {'required': True}, + "credentials_type": {"required": True}, } _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, } _subtype_map = { - 'credentials_type': {'AccountKey': 'AccountKeyDatastoreCredentials', 'Certificate': 'CertificateDatastoreCredentials', 'KerberosKeytab': 'KerberosKeytabCredentials', 'KerberosPassword': 'KerberosPasswordCredentials', 'None': 'NoneDatastoreCredentials', 'Sas': 'SasDatastoreCredentials', 'ServicePrincipal': 'ServicePrincipalDatastoreCredentials'} + "credentials_type": { + "AccountKey": "AccountKeyDatastoreCredentials", + "Certificate": "CertificateDatastoreCredentials", + "KerberosKeytab": "KerberosKeytabCredentials", + "KerberosPassword": "KerberosPasswordCredentials", + "None": "NoneDatastoreCredentials", + "Sas": "SasDatastoreCredentials", + "ServicePrincipal": "ServicePrincipalDatastoreCredentials", + } } - def __init__( - self, - **kwargs - ): - """ - """ - super(DatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.credentials_type: Optional[str] = None class AccountKeyDatastoreCredentials(DatastoreCredentials): @@ -194,73 +261,72 @@ class AccountKeyDatastoreCredentials(DatastoreCredentials): All required parameters must be populated in order to send to Azure. - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType - :ivar secrets: Required. [Required] Storage account secrets. + :ivar secrets: [Required] Storage account secrets. Required. 
:vartype secrets: ~azure.mgmt.machinelearningservices.models.AccountKeyDatastoreSecrets """ _validation = { - 'credentials_type': {'required': True}, - 'secrets': {'required': True}, + "credentials_type": {"required": True}, + "secrets": {"required": True}, } _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'AccountKeyDatastoreSecrets'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, + "secrets": {"key": "secrets", "type": "AccountKeyDatastoreSecrets"}, } - def __init__( - self, - *, - secrets: "AccountKeyDatastoreSecrets", - **kwargs - ): + def __init__(self, *, secrets: "_models.AccountKeyDatastoreSecrets", **kwargs: Any) -> None: """ - :keyword secrets: Required. [Required] Storage account secrets. + :keyword secrets: [Required] Storage account secrets. Required. :paramtype secrets: ~azure.mgmt.machinelearningservices.models.AccountKeyDatastoreSecrets """ - super(AccountKeyDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'AccountKey' # type: str + super().__init__(**kwargs) + self.credentials_type: str = "AccountKey" self.secrets = secrets -class DatastoreSecrets(msrest.serialization.Model): +class DatastoreSecrets(_serialization.Model): """Base definition for datastore secrets. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AccountKeyDatastoreSecrets, CertificateDatastoreSecrets, KerberosKeytabSecrets, KerberosPasswordSecrets, SasDatastoreSecrets, ServicePrincipalDatastoreSecrets. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AccountKeyDatastoreSecrets, CertificateDatastoreSecrets, KerberosKeytabSecrets, + KerberosPasswordSecrets, SasDatastoreSecrets, ServicePrincipalDatastoreSecrets All required parameters must be populated in order to send to Azure. - :ivar secrets_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", "Sas", - "ServicePrincipal", "KerberosPassword", "KerberosKeytab". + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". 
:vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType """ _validation = { - 'secrets_type': {'required': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, + "secrets_type": {"key": "secretsType", "type": "str"}, } _subtype_map = { - 'secrets_type': {'AccountKey': 'AccountKeyDatastoreSecrets', 'Certificate': 'CertificateDatastoreSecrets', 'KerberosKeytab': 'KerberosKeytabSecrets', 'KerberosPassword': 'KerberosPasswordSecrets', 'Sas': 'SasDatastoreSecrets', 'ServicePrincipal': 'ServicePrincipalDatastoreSecrets'} + "secrets_type": { + "AccountKey": "AccountKeyDatastoreSecrets", + "Certificate": "CertificateDatastoreSecrets", + "KerberosKeytab": "KerberosKeytabSecrets", + "KerberosPassword": "KerberosPasswordSecrets", + "Sas": "SasDatastoreSecrets", + "ServicePrincipal": "ServicePrincipalDatastoreSecrets", + } } - def __init__( - self, - **kwargs - ): - """ - """ - super(DatastoreSecrets, self).__init__(**kwargs) - self.secrets_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.secrets_type: Optional[str] = None class AccountKeyDatastoreSecrets(DatastoreSecrets): @@ -268,39 +334,34 @@ class AccountKeyDatastoreSecrets(DatastoreSecrets): All required parameters must be populated in order to send to Azure. - :ivar secrets_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", "Sas", - "ServicePrincipal", "KerberosPassword", "KerberosKeytab". + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType :ivar key: Storage account key. :vartype key: str """ _validation = { - 'secrets_type': {'required': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'str'}, + "secrets_type": {"key": "secretsType", "type": "str"}, + "key": {"key": "key", "type": "str"}, } - def __init__( - self, - *, - key: Optional[str] = None, - **kwargs - ): + def __init__(self, *, key: Optional[str] = None, **kwargs: Any) -> None: """ :keyword key: Storage account key. :paramtype key: str """ - super(AccountKeyDatastoreSecrets, self).__init__(**kwargs) - self.secrets_type = 'AccountKey' # type: str + super().__init__(**kwargs) + self.secrets_type: str = "AccountKey" self.key = key -class AcrDetails(msrest.serialization.Model): +class AcrDetails(_serialization.Model): """Details of ACR account to be used for the Registry. 
:ivar system_created_acr_account: Details of system created ACR account to be used for the @@ -314,17 +375,17 @@ class AcrDetails(msrest.serialization.Model): """ _attribute_map = { - 'system_created_acr_account': {'key': 'systemCreatedAcrAccount', 'type': 'SystemCreatedAcrAccount'}, - 'user_created_acr_account': {'key': 'userCreatedAcrAccount', 'type': 'UserCreatedAcrAccount'}, + "system_created_acr_account": {"key": "systemCreatedAcrAccount", "type": "SystemCreatedAcrAccount"}, + "user_created_acr_account": {"key": "userCreatedAcrAccount", "type": "UserCreatedAcrAccount"}, } def __init__( self, *, - system_created_acr_account: Optional["SystemCreatedAcrAccount"] = None, - user_created_acr_account: Optional["UserCreatedAcrAccount"] = None, - **kwargs - ): + system_created_acr_account: Optional["_models.SystemCreatedAcrAccount"] = None, + user_created_acr_account: Optional["_models.UserCreatedAcrAccount"] = None, + **kwargs: Any + ) -> None: """ :keyword system_created_acr_account: Details of system created ACR account to be used for the Registry. @@ -335,12 +396,12 @@ def __init__( :paramtype user_created_acr_account: ~azure.mgmt.machinelearningservices.models.UserCreatedAcrAccount """ - super(AcrDetails, self).__init__(**kwargs) + super().__init__(**kwargs) self.system_created_acr_account = system_created_acr_account self.user_created_acr_account = user_created_acr_account -class AKSSchema(msrest.serialization.Model): +class AKSSchema(_serialization.Model): """AKSSchema. :ivar properties: AKS properties. @@ -348,42 +409,38 @@ class AKSSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'AKSSchemaProperties'}, + "properties": {"key": "properties", "type": "AKSSchemaProperties"}, } - def __init__( - self, - *, - properties: Optional["AKSSchemaProperties"] = None, - **kwargs - ): + def __init__(self, *, properties: Optional["_models.AKSSchemaProperties"] = None, **kwargs: Any) -> None: """ :keyword properties: AKS properties. :paramtype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties """ - super(AKSSchema, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class Compute(msrest.serialization.Model): +class Compute(_serialization.Model): """Machine Learning compute object. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AKS, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HDInsight, Kubernetes, SynapseSpark, VirtualMachine. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AKS, AmlCompute, ComputeInstance, DataFactory, DataLakeAnalytics, Databricks, HDInsight, + Kubernetes, SynapseSpark, VirtualMachine Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". 
:vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType :ivar compute_location: Location for the underlying compute. :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ProvisioningState :ivar description: The description of the Machine Learning compute. @@ -405,29 +462,40 @@ class Compute(msrest.serialization.Model): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } _subtype_map = { - 'compute_type': {'AKS': 'AKS', 'AmlCompute': 'AmlCompute', 'ComputeInstance': 'ComputeInstance', 'DataFactory': 'DataFactory', 'DataLakeAnalytics': 'DataLakeAnalytics', 'Databricks': 'Databricks', 'HDInsight': 'HDInsight', 'Kubernetes': 'Kubernetes', 'SynapseSpark': 'SynapseSpark', 'VirtualMachine': 'VirtualMachine'} + "compute_type": { + "AKS": "AKS", + "AmlCompute": "AmlCompute", + "ComputeInstance": "ComputeInstance", + "DataFactory": "DataFactory", + "DataLakeAnalytics": "DataLakeAnalytics", + "Databricks": "Databricks", + "HDInsight": "HDInsight", + "Kubernetes": "Kubernetes", + "SynapseSpark": "SynapseSpark", + "VirtualMachine": "VirtualMachine", + } } def __init__( @@ -437,8 +505,8 @@ def __init__( description: Optional[str] = None, resource_id: Optional[str] = None, disable_local_auth: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword compute_location: 
Location for the underlying compute. :paramtype compute_location: str @@ -450,8 +518,8 @@ def __init__( MSI and AAD exclusively for authentication. :paramtype disable_local_auth: bool """ - super(Compute, self).__init__(**kwargs) - self.compute_type = None # type: Optional[str] + super().__init__(**kwargs) + self.compute_type: Optional[str] = None self.compute_location = compute_location self.provisioning_state = None self.description = description @@ -463,7 +531,7 @@ def __init__( self.disable_local_auth = disable_local_auth -class AKS(Compute, AKSSchema): +class AKS(Compute, AKSSchema): # pylint: disable=too-many-instance-attributes """A Machine Learning compute based on AKS. Variables are only populated by the server, and will be ignored when sending a request. @@ -472,15 +540,15 @@ class AKS(Compute, AKSSchema): :ivar properties: AKS properties. :vartype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType :ivar compute_location: Location for the underlying compute. :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ProvisioningState :ivar description: The description of the Machine Learning compute. 
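The _subtype_map on Compute above is what routes a computeType discriminator to the matching subclass when a response is deserialized. A small sketch of that behavior, assuming the vendored deserializer resolves subtypes the way msrest does and using the internal _restclient models package from this diff:

from azure.ai.ml._restclient.v2023_08_01_preview import models

# Wire-shaped payload; readonly fields such as provisioningState are omitted.
wire = {
    "computeType": "AKS",
    "description": "attached AKS cluster",
    "properties": {"agentCount": 3},
}

compute = models.Compute.deserialize(wire)
assert isinstance(compute, models.AKS)        # chosen via _subtype_map["compute_type"]
assert compute.properties.agent_count == 3    # nested AKSSchemaProperties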
@@ -502,38 +570,38 @@ class AKS(Compute, AKSSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'AKSSchemaProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "AKSSchemaProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } def __init__( self, *, - properties: Optional["AKSSchemaProperties"] = None, + properties: Optional["_models.AKSSchemaProperties"] = None, compute_location: Optional[str] = None, description: Optional[str] = None, resource_id: Optional[str] = None, disable_local_auth: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword properties: AKS properties. :paramtype properties: ~azure.mgmt.machinelearningservices.models.AKSSchemaProperties @@ -547,9 +615,16 @@ def __init__( MSI and AAD exclusively for authentication. :paramtype disable_local_auth: bool """ - super(AKS, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) + super().__init__( + compute_location=compute_location, + description=description, + resource_id=resource_id, + disable_local_auth=disable_local_auth, + properties=properties, + **kwargs + ) self.properties = properties - self.compute_type = 'AKS' # type: str + self.compute_type: str = "AKS" self.compute_location = compute_location self.provisioning_state = None self.description = description @@ -561,7 +636,7 @@ def __init__( self.disable_local_auth = disable_local_auth -class AksComputeSecretsProperties(msrest.serialization.Model): +class AksComputeSecretsProperties(_serialization.Model): """Properties of AksComputeSecrets. 
:ivar user_kube_config: Content of kubeconfig file that can be used to connect to the @@ -575,9 +650,9 @@ class AksComputeSecretsProperties(msrest.serialization.Model): """ _attribute_map = { - 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'}, - 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'}, - 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'}, + "user_kube_config": {"key": "userKubeConfig", "type": "str"}, + "admin_kube_config": {"key": "adminKubeConfig", "type": "str"}, + "image_pull_secret_name": {"key": "imagePullSecretName", "type": "str"}, } def __init__( @@ -586,8 +661,8 @@ def __init__( user_kube_config: Optional[str] = None, admin_kube_config: Optional[str] = None, image_pull_secret_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword user_kube_config: Content of kubeconfig file that can be used to connect to the Kubernetes cluster. @@ -598,46 +673,46 @@ def __init__( :keyword image_pull_secret_name: Image registry pull secret. :paramtype image_pull_secret_name: str """ - super(AksComputeSecretsProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.user_kube_config = user_kube_config self.admin_kube_config = admin_kube_config self.image_pull_secret_name = image_pull_secret_name -class ComputeSecrets(msrest.serialization.Model): +class ComputeSecrets(_serialization.Model): """Secrets related to a Machine Learning compute. Might differ for every type of compute. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AksComputeSecrets, DatabricksComputeSecrets, VirtualMachineSecrets All required parameters must be populated in order to send to Azure. - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType """ _validation = { - 'compute_type': {'required': True}, + "compute_type": {"required": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, + "compute_type": {"key": "computeType", "type": "str"}, } _subtype_map = { - 'compute_type': {'AKS': 'AksComputeSecrets', 'Databricks': 'DatabricksComputeSecrets', 'VirtualMachine': 'VirtualMachineSecrets'} + "compute_type": { + "AKS": "AksComputeSecrets", + "Databricks": "DatabricksComputeSecrets", + "VirtualMachine": "VirtualMachineSecrets", + } } - def __init__( - self, - **kwargs - ): - """ - """ - super(ComputeSecrets, self).__init__(**kwargs) - self.compute_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.compute_type: Optional[str] = None class AksComputeSecrets(ComputeSecrets, AksComputeSecretsProperties): @@ -653,21 +728,21 @@ class AksComputeSecrets(ComputeSecrets, AksComputeSecretsProperties): :vartype admin_kube_config: str :ivar image_pull_secret_name: Image registry pull secret. 
:vartype image_pull_secret_name: str - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType """ _validation = { - 'compute_type': {'required': True}, + "compute_type": {"required": True}, } _attribute_map = { - 'user_kube_config': {'key': 'userKubeConfig', 'type': 'str'}, - 'admin_kube_config': {'key': 'adminKubeConfig', 'type': 'str'}, - 'image_pull_secret_name': {'key': 'imagePullSecretName', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, + "user_kube_config": {"key": "userKubeConfig", "type": "str"}, + "admin_kube_config": {"key": "adminKubeConfig", "type": "str"}, + "image_pull_secret_name": {"key": "imagePullSecretName", "type": "str"}, + "compute_type": {"key": "computeType", "type": "str"}, } def __init__( @@ -676,8 +751,8 @@ def __init__( user_kube_config: Optional[str] = None, admin_kube_config: Optional[str] = None, image_pull_secret_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword user_kube_config: Content of kubeconfig file that can be used to connect to the Kubernetes cluster. @@ -688,14 +763,19 @@ def __init__( :keyword image_pull_secret_name: Image registry pull secret. :paramtype image_pull_secret_name: str """ - super(AksComputeSecrets, self).__init__(user_kube_config=user_kube_config, admin_kube_config=admin_kube_config, image_pull_secret_name=image_pull_secret_name, **kwargs) + super().__init__( + user_kube_config=user_kube_config, + admin_kube_config=admin_kube_config, + image_pull_secret_name=image_pull_secret_name, + **kwargs + ) self.user_kube_config = user_kube_config self.admin_kube_config = admin_kube_config self.image_pull_secret_name = image_pull_secret_name - self.compute_type = 'AKS' # type: str + self.compute_type: str = "AKS" -class AksNetworkingConfiguration(msrest.serialization.Model): +class AksNetworkingConfiguration(_serialization.Model): """Advance configuration for AKS networking. :ivar subnet_id: Virtual network subnet resource ID the compute nodes belong to. 
@@ -712,16 +792,18 @@ class AksNetworkingConfiguration(msrest.serialization.Model): """ _validation = { - 'service_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'}, - 'dns_service_ip': {'pattern': r'^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$'}, - 'docker_bridge_cidr': {'pattern': r'^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$'}, + "service_cidr": {"pattern": r"^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$"}, + "dns_service_ip": { + "pattern": r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$" + }, + "docker_bridge_cidr": {"pattern": r"^([0-9]{1,3}\.){3}[0-9]{1,3}(\/([0-9]|[1-2][0-9]|3[0-2]))?$"}, } _attribute_map = { - 'subnet_id': {'key': 'subnetId', 'type': 'str'}, - 'service_cidr': {'key': 'serviceCidr', 'type': 'str'}, - 'dns_service_ip': {'key': 'dnsServiceIP', 'type': 'str'}, - 'docker_bridge_cidr': {'key': 'dockerBridgeCidr', 'type': 'str'}, + "subnet_id": {"key": "subnetId", "type": "str"}, + "service_cidr": {"key": "serviceCidr", "type": "str"}, + "dns_service_ip": {"key": "dnsServiceIP", "type": "str"}, + "docker_bridge_cidr": {"key": "dockerBridgeCidr", "type": "str"}, } def __init__( @@ -731,8 +813,8 @@ def __init__( service_cidr: Optional[str] = None, dns_service_ip: Optional[str] = None, docker_bridge_cidr: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword subnet_id: Virtual network subnet resource ID the compute nodes belong to. :paramtype subnet_id: str @@ -746,14 +828,14 @@ def __init__( must not overlap with any Subnet IP ranges or the Kubernetes service address range. :paramtype docker_bridge_cidr: str """ - super(AksNetworkingConfiguration, self).__init__(**kwargs) + super().__init__(**kwargs) self.subnet_id = subnet_id self.service_cidr = service_cidr self.dns_service_ip = dns_service_ip self.docker_bridge_cidr = docker_bridge_cidr -class AKSSchemaProperties(msrest.serialization.Model): +class AKSSchemaProperties(_serialization.Model): """AKS properties. Variables are only populated by the server, and will be ignored when sending a request. @@ -766,36 +848,36 @@ class AKSSchemaProperties(msrest.serialization.Model): :vartype agent_count: int :ivar agent_vm_size: Agent virtual machine size. :vartype agent_vm_size: str - :ivar cluster_purpose: Intended usage of the cluster. Possible values include: "FastProd", - "DenseProd", "DevTest". Default value: "FastProd". + :ivar cluster_purpose: Intended usage of the cluster. Known values are: "FastProd", + "DenseProd", and "DevTest". :vartype cluster_purpose: str or ~azure.mgmt.machinelearningservices.models.ClusterPurpose :ivar ssl_configuration: SSL configuration. :vartype ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration :ivar aks_networking_configuration: AKS networking configuration for vnet. :vartype aks_networking_configuration: ~azure.mgmt.machinelearningservices.models.AksNetworkingConfiguration - :ivar load_balancer_type: Load Balancer Type. Possible values include: "PublicIp", - "InternalLoadBalancer". Default value: "PublicIp". + :ivar load_balancer_type: Load Balancer Type. Known values are: "PublicIp" and + "InternalLoadBalancer". :vartype load_balancer_type: str or ~azure.mgmt.machinelearningservices.models.LoadBalancerType :ivar load_balancer_subnet: Load Balancer Subnet. 
:vartype load_balancer_subnet: str """ _validation = { - 'system_services': {'readonly': True}, - 'agent_count': {'minimum': 0}, + "system_services": {"readonly": True}, + "agent_count": {"minimum": 0}, } _attribute_map = { - 'cluster_fqdn': {'key': 'clusterFqdn', 'type': 'str'}, - 'system_services': {'key': 'systemServices', 'type': '[SystemService]'}, - 'agent_count': {'key': 'agentCount', 'type': 'int'}, - 'agent_vm_size': {'key': 'agentVmSize', 'type': 'str'}, - 'cluster_purpose': {'key': 'clusterPurpose', 'type': 'str'}, - 'ssl_configuration': {'key': 'sslConfiguration', 'type': 'SslConfiguration'}, - 'aks_networking_configuration': {'key': 'aksNetworkingConfiguration', 'type': 'AksNetworkingConfiguration'}, - 'load_balancer_type': {'key': 'loadBalancerType', 'type': 'str'}, - 'load_balancer_subnet': {'key': 'loadBalancerSubnet', 'type': 'str'}, + "cluster_fqdn": {"key": "clusterFqdn", "type": "str"}, + "system_services": {"key": "systemServices", "type": "[SystemService]"}, + "agent_count": {"key": "agentCount", "type": "int"}, + "agent_vm_size": {"key": "agentVmSize", "type": "str"}, + "cluster_purpose": {"key": "clusterPurpose", "type": "str"}, + "ssl_configuration": {"key": "sslConfiguration", "type": "SslConfiguration"}, + "aks_networking_configuration": {"key": "aksNetworkingConfiguration", "type": "AksNetworkingConfiguration"}, + "load_balancer_type": {"key": "loadBalancerType", "type": "str"}, + "load_balancer_subnet": {"key": "loadBalancerSubnet", "type": "str"}, } def __init__( @@ -804,13 +886,13 @@ def __init__( cluster_fqdn: Optional[str] = None, agent_count: Optional[int] = None, agent_vm_size: Optional[str] = None, - cluster_purpose: Optional[Union[str, "ClusterPurpose"]] = "FastProd", - ssl_configuration: Optional["SslConfiguration"] = None, - aks_networking_configuration: Optional["AksNetworkingConfiguration"] = None, - load_balancer_type: Optional[Union[str, "LoadBalancerType"]] = "PublicIp", + cluster_purpose: Union[str, "_models.ClusterPurpose"] = "FastProd", + ssl_configuration: Optional["_models.SslConfiguration"] = None, + aks_networking_configuration: Optional["_models.AksNetworkingConfiguration"] = None, + load_balancer_type: Union[str, "_models.LoadBalancerType"] = "PublicIp", load_balancer_subnet: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword cluster_fqdn: Cluster full qualified domain name. :paramtype cluster_fqdn: str @@ -818,22 +900,22 @@ def __init__( :paramtype agent_count: int :keyword agent_vm_size: Agent virtual machine size. :paramtype agent_vm_size: str - :keyword cluster_purpose: Intended usage of the cluster. Possible values include: "FastProd", - "DenseProd", "DevTest". Default value: "FastProd". + :keyword cluster_purpose: Intended usage of the cluster. Known values are: "FastProd", + "DenseProd", and "DevTest". :paramtype cluster_purpose: str or ~azure.mgmt.machinelearningservices.models.ClusterPurpose :keyword ssl_configuration: SSL configuration. :paramtype ssl_configuration: ~azure.mgmt.machinelearningservices.models.SslConfiguration :keyword aks_networking_configuration: AKS networking configuration for vnet. :paramtype aks_networking_configuration: ~azure.mgmt.machinelearningservices.models.AksNetworkingConfiguration - :keyword load_balancer_type: Load Balancer Type. Possible values include: "PublicIp", - "InternalLoadBalancer". Default value: "PublicIp". + :keyword load_balancer_type: Load Balancer Type. Known values are: "PublicIp" and + "InternalLoadBalancer". 
:paramtype load_balancer_type: str or ~azure.mgmt.machinelearningservices.models.LoadBalancerType :keyword load_balancer_subnet: Load Balancer Subnet. :paramtype load_balancer_subnet: str """ - super(AKSSchemaProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.cluster_fqdn = cluster_fqdn self.system_services = None self.agent_count = agent_count @@ -845,41 +927,41 @@ def __init__( self.load_balancer_subnet = load_balancer_subnet -class MonitoringFeatureFilterBase(msrest.serialization.Model): +class MonitoringFeatureFilterBase(_serialization.Model): """MonitoringFeatureFilterBase. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AllFeatures, FeatureSubset, TopNFeaturesByAttribution. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AllFeatures, FeatureSubset, TopNFeaturesByAttribution All required parameters must be populated in order to send to Azure. - :ivar filter_type: Required. [Required] Specifies the feature filter to leverage when selecting - features to calculate metrics over.Constant filled by server. Possible values include: - "AllFeatures", "TopNByAttribution", "FeatureSubset". + :ivar filter_type: [Required] Specifies the feature filter to leverage when selecting features + to calculate metrics over. Required. Known values are: "AllFeatures", "TopNByAttribution", and + "FeatureSubset". :vartype filter_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterType """ _validation = { - 'filter_type': {'required': True}, + "filter_type": {"required": True}, } _attribute_map = { - 'filter_type': {'key': 'filterType', 'type': 'str'}, + "filter_type": {"key": "filterType", "type": "str"}, } _subtype_map = { - 'filter_type': {'AllFeatures': 'AllFeatures', 'FeatureSubset': 'FeatureSubset', 'TopNByAttribution': 'TopNFeaturesByAttribution'} + "filter_type": { + "AllFeatures": "AllFeatures", + "FeatureSubset": "FeatureSubset", + "TopNByAttribution": "TopNFeaturesByAttribution", + } } - def __init__( - self, - **kwargs - ): - """ - """ - super(MonitoringFeatureFilterBase, self).__init__(**kwargs) - self.filter_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.filter_type: Optional[str] = None class AllFeatures(MonitoringFeatureFilterBase): @@ -887,64 +969,54 @@ class AllFeatures(MonitoringFeatureFilterBase): All required parameters must be populated in order to send to Azure. - :ivar filter_type: Required. [Required] Specifies the feature filter to leverage when selecting - features to calculate metrics over.Constant filled by server. Possible values include: - "AllFeatures", "TopNByAttribution", "FeatureSubset". + :ivar filter_type: [Required] Specifies the feature filter to leverage when selecting features + to calculate metrics over. Required. Known values are: "AllFeatures", "TopNByAttribution", and + "FeatureSubset". 
:vartype filter_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterType """ _validation = { - 'filter_type': {'required': True}, + "filter_type": {"required": True}, } _attribute_map = { - 'filter_type': {'key': 'filterType', 'type': 'str'}, + "filter_type": {"key": "filterType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(AllFeatures, self).__init__(**kwargs) - self.filter_type = 'AllFeatures' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.filter_type: str = "AllFeatures" -class Nodes(msrest.serialization.Model): +class Nodes(_serialization.Model): """Abstract Nodes definition. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AllNodes. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AllNodes All required parameters must be populated in order to send to Azure. - :ivar nodes_value_type: Required. [Required] Type of the Nodes value.Constant filled by server. - Possible values include: "All", "Custom". + :ivar nodes_value_type: [Required] Type of the Nodes value. Required. Known values are: "All" + and "Custom". :vartype nodes_value_type: str or ~azure.mgmt.machinelearningservices.models.NodesValueType """ _validation = { - 'nodes_value_type': {'required': True}, + "nodes_value_type": {"required": True}, } _attribute_map = { - 'nodes_value_type': {'key': 'nodesValueType', 'type': 'str'}, + "nodes_value_type": {"key": "nodesValueType", "type": "str"}, } - _subtype_map = { - 'nodes_value_type': {'All': 'AllNodes'} - } + _subtype_map = {"nodes_value_type": {"All": "AllNodes"}} - def __init__( - self, - **kwargs - ): - """ - """ - super(Nodes, self).__init__(**kwargs) - self.nodes_value_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.nodes_value_type: Optional[str] = None class AllNodes(Nodes): @@ -952,30 +1024,26 @@ class AllNodes(Nodes): All required parameters must be populated in order to send to Azure. - :ivar nodes_value_type: Required. [Required] Type of the Nodes value.Constant filled by server. - Possible values include: "All", "Custom". + :ivar nodes_value_type: [Required] Type of the Nodes value. Required. Known values are: "All" + and "Custom". :vartype nodes_value_type: str or ~azure.mgmt.machinelearningservices.models.NodesValueType """ _validation = { - 'nodes_value_type': {'required': True}, + "nodes_value_type": {"required": True}, } _attribute_map = { - 'nodes_value_type': {'key': 'nodesValueType', 'type': 'str'}, + "nodes_value_type": {"key": "nodesValueType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(AllNodes, self).__init__(**kwargs) - self.nodes_value_type = 'All' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.nodes_value_type: str = "All" -class AmlComputeSchema(msrest.serialization.Model): +class AmlComputeSchema(_serialization.Model): """Properties(top level) of AmlCompute. :ivar properties: Properties of AmlCompute. 
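As a point of reference, the regenerated models in the hunks above are now built with keyword-only arguments and plain "known value" strings instead of msrest enum defaults. A minimal sketch, assuming the models are importable from the v2023_08_01_preview restclient package touched by this patch; the subnet ID, CIDR ranges, and VM size below are illustrative placeholders, not values taken from the patch:

# Minimal sketch; assumes the regenerated preview models package from this patch.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

# AksNetworkingConfiguration: all four parameters are optional keywords (see hunk above).
aks_networking = _models.AksNetworkingConfiguration(
    subnet_id="<subnet-arm-id>",          # placeholder ARM resource ID
    service_cidr="10.0.0.0/16",           # must satisfy the serviceCidr pattern in _validation
    dns_service_ip="10.0.0.10",
    docker_bridge_cidr="172.17.0.1/16",
)

# AKSSchemaProperties: enum-like fields accept plain strings with server-known values.
aks_properties = _models.AKSSchemaProperties(
    agent_count=3,
    agent_vm_size="Standard_DS3_v2",      # placeholder VM size
    cluster_purpose="DevTest",            # known values: "FastProd", "DenseProd", "DevTest"
    load_balancer_type="InternalLoadBalancer",
    aks_networking_configuration=aks_networking,
)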
@@ -983,24 +1051,19 @@ class AmlComputeSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, + "properties": {"key": "properties", "type": "AmlComputeProperties"}, } - def __init__( - self, - *, - properties: Optional["AmlComputeProperties"] = None, - **kwargs - ): + def __init__(self, *, properties: Optional["_models.AmlComputeProperties"] = None, **kwargs: Any) -> None: """ :keyword properties: Properties of AmlCompute. :paramtype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties """ - super(AmlComputeSchema, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class AmlCompute(Compute, AmlComputeSchema): +class AmlCompute(Compute, AmlComputeSchema): # pylint: disable=too-many-instance-attributes """An Azure Machine Learning compute. Variables are only populated by the server, and will be ignored when sending a request. @@ -1009,15 +1072,15 @@ class AmlCompute(Compute, AmlComputeSchema): :ivar properties: Properties of AmlCompute. :vartype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType :ivar compute_location: Location for the underlying compute. :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ProvisioningState :ivar description: The description of the Machine Learning compute. 
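The compute models in the following hunks use the same pattern: AmlCompute pins its compute_type discriminator to "AmlCompute" in __init__ and forwards the shared keywords to its base classes. A hedged sketch built only from constructor parameters that appear in this patch (import path assumed as above; the SKU, region, and description are placeholders):

# Minimal sketch; assumes the regenerated preview models package from this patch.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

aml_props = _models.AmlComputeProperties(
    os_type="Linux",                      # known values: "Linux", "Windows"
    vm_size="STANDARD_DS3_V2",            # placeholder SKU
    vm_priority="Dedicated",              # known values: "Dedicated", "LowPriority"
    enable_node_public_ip=True,
)

aml_compute = _models.AmlCompute(
    properties=aml_props,
    compute_location="eastus",            # placeholder region
    description="training cluster",       # placeholder description
    disable_local_auth=True,
)

# The discriminator is filled by the subclass constructor, not passed by the caller.
assert aml_compute.compute_type == "AmlCompute"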
@@ -1039,38 +1102,38 @@ class AmlCompute(Compute, AmlComputeSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'AmlComputeProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "AmlComputeProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } def __init__( self, *, - properties: Optional["AmlComputeProperties"] = None, + properties: Optional["_models.AmlComputeProperties"] = None, compute_location: Optional[str] = None, description: Optional[str] = None, resource_id: Optional[str] = None, disable_local_auth: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword properties: Properties of AmlCompute. :paramtype properties: ~azure.mgmt.machinelearningservices.models.AmlComputeProperties @@ -1084,9 +1147,16 @@ def __init__( MSI and AAD exclusively for authentication. :paramtype disable_local_auth: bool """ - super(AmlCompute, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) + super().__init__( + compute_location=compute_location, + description=description, + resource_id=resource_id, + disable_local_auth=disable_local_auth, + properties=properties, + **kwargs + ) self.properties = properties - self.compute_type = 'AmlCompute' # type: str + self.compute_type: str = "AmlCompute" self.compute_location = compute_location self.provisioning_state = None self.description = description @@ -1098,7 +1168,7 @@ def __init__( self.disable_local_auth = disable_local_auth -class AmlComputeNodeInformation(msrest.serialization.Model): +class AmlComputeNodeInformation(_serialization.Model): """Compute node information related to a AmlCompute. 
Variables are only populated by the server, and will be ignored when sending a request. @@ -1112,38 +1182,34 @@ class AmlComputeNodeInformation(msrest.serialization.Model): :ivar port: SSH port number of the node. :vartype port: int :ivar node_state: State of the compute node. Values are idle, running, preparing, unusable, - leaving and preempted. Possible values include: "idle", "running", "preparing", "unusable", - "leaving", "preempted". + leaving and preempted. Known values are: "idle", "running", "preparing", "unusable", "leaving", + and "preempted". :vartype node_state: str or ~azure.mgmt.machinelearningservices.models.NodeState :ivar run_id: ID of the Experiment running on the node, if any else null. :vartype run_id: str """ _validation = { - 'node_id': {'readonly': True}, - 'private_ip_address': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'port': {'readonly': True}, - 'node_state': {'readonly': True}, - 'run_id': {'readonly': True}, + "node_id": {"readonly": True}, + "private_ip_address": {"readonly": True}, + "public_ip_address": {"readonly": True}, + "port": {"readonly": True}, + "node_state": {"readonly": True}, + "run_id": {"readonly": True}, } _attribute_map = { - 'node_id': {'key': 'nodeId', 'type': 'str'}, - 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'port': {'key': 'port', 'type': 'int'}, - 'node_state': {'key': 'nodeState', 'type': 'str'}, - 'run_id': {'key': 'runId', 'type': 'str'}, + "node_id": {"key": "nodeId", "type": "str"}, + "private_ip_address": {"key": "privateIpAddress", "type": "str"}, + "public_ip_address": {"key": "publicIpAddress", "type": "str"}, + "port": {"key": "port", "type": "int"}, + "node_state": {"key": "nodeState", "type": "str"}, + "run_id": {"key": "runId", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(AmlComputeNodeInformation, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.node_id = None self.private_ip_address = None self.public_ip_address = None @@ -1152,7 +1218,7 @@ def __init__( self.run_id = None -class AmlComputeNodesInformation(msrest.serialization.Model): +class AmlComputeNodesInformation(_serialization.Model): """Result of AmlCompute Nodes. Variables are only populated by the server, and will be ignored when sending a request. @@ -1164,38 +1230,32 @@ class AmlComputeNodesInformation(msrest.serialization.Model): """ _validation = { - 'nodes': {'readonly': True}, - 'next_link': {'readonly': True}, + "nodes": {"readonly": True}, + "next_link": {"readonly": True}, } _attribute_map = { - 'nodes': {'key': 'nodes', 'type': '[AmlComputeNodeInformation]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "nodes": {"key": "nodes", "type": "[AmlComputeNodeInformation]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(AmlComputeNodesInformation, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.nodes = None self.next_link = None -class AmlComputeProperties(msrest.serialization.Model): +class AmlComputeProperties(_serialization.Model): # pylint: disable=too-many-instance-attributes """AML Compute properties. Variables are only populated by the server, and will be ignored when sending a request. - :ivar os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value: - "Linux". 
+ :ivar os_type: Compute OS Type. Known values are: "Linux" and "Windows". :vartype os_type: str or ~azure.mgmt.machinelearningservices.models.OsType :ivar vm_size: Virtual Machine Size. :vartype vm_size: str - :ivar vm_priority: Virtual Machine priority. Possible values include: "Dedicated", - "LowPriority". + :ivar vm_priority: Virtual Machine priority. Known values are: "Dedicated" and "LowPriority". :vartype vm_priority: str or ~azure.mgmt.machinelearningservices.models.VmPriority :ivar virtual_machine_image: Virtual Machine image for AML Compute - windows only. :vartype virtual_machine_image: ~azure.mgmt.machinelearningservices.models.VirtualMachineImage @@ -1214,8 +1274,8 @@ class AmlComputeProperties(msrest.serialization.Model): Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified - Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined, else is open all public nodes. It can be default only during cluster creation time, after - creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled", - "NotSpecified". Default value: "NotSpecified". + creation it will be either enabled or disabled. Known values are: "Enabled", "Disabled", and + "NotSpecified". :vartype remote_login_port_public_access: str or ~azure.mgmt.machinelearningservices.models.RemoteLoginPortPublicAccess :ivar allocation_state: Allocation state of the compute. Possible values are: steady - @@ -1223,7 +1283,7 @@ class AmlComputeProperties(msrest.serialization.Model): in the compute in progress. A compute enters this state when it is created and when no operations are being performed on the compute to change the number of compute nodes. resizing - Indicates that the compute is resizing; that is, compute nodes are being added to or removed - from the compute. Possible values include: "Steady", "Resizing". + from the compute. Known values are: "Steady" and "Resizing". :vartype allocation_state: str or ~azure.mgmt.machinelearningservices.models.AllocationState :ivar allocation_state_transition_time: The time at which the compute entered its current allocation state. @@ -1245,61 +1305,60 @@ class AmlComputeProperties(msrest.serialization.Model): public IPs. :vartype enable_node_public_ip: bool :ivar property_bag: A property bag containing additional properties. 
- :vartype property_bag: any + :vartype property_bag: JSON """ _validation = { - 'allocation_state': {'readonly': True}, - 'allocation_state_transition_time': {'readonly': True}, - 'errors': {'readonly': True}, - 'current_node_count': {'readonly': True}, - 'target_node_count': {'readonly': True}, - 'node_state_counts': {'readonly': True}, + "allocation_state": {"readonly": True}, + "allocation_state_transition_time": {"readonly": True}, + "errors": {"readonly": True}, + "current_node_count": {"readonly": True}, + "target_node_count": {"readonly": True}, + "node_state_counts": {"readonly": True}, } _attribute_map = { - 'os_type': {'key': 'osType', 'type': 'str'}, - 'vm_size': {'key': 'vmSize', 'type': 'str'}, - 'vm_priority': {'key': 'vmPriority', 'type': 'str'}, - 'virtual_machine_image': {'key': 'virtualMachineImage', 'type': 'VirtualMachineImage'}, - 'isolated_network': {'key': 'isolatedNetwork', 'type': 'bool'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, - 'user_account_credentials': {'key': 'userAccountCredentials', 'type': 'UserAccountCredentials'}, - 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, - 'remote_login_port_public_access': {'key': 'remoteLoginPortPublicAccess', 'type': 'str'}, - 'allocation_state': {'key': 'allocationState', 'type': 'str'}, - 'allocation_state_transition_time': {'key': 'allocationStateTransitionTime', 'type': 'iso-8601'}, - 'errors': {'key': 'errors', 'type': '[ErrorResponse]'}, - 'current_node_count': {'key': 'currentNodeCount', 'type': 'int'}, - 'target_node_count': {'key': 'targetNodeCount', 'type': 'int'}, - 'node_state_counts': {'key': 'nodeStateCounts', 'type': 'NodeStateCounts'}, - 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'}, - 'property_bag': {'key': 'propertyBag', 'type': 'object'}, + "os_type": {"key": "osType", "type": "str"}, + "vm_size": {"key": "vmSize", "type": "str"}, + "vm_priority": {"key": "vmPriority", "type": "str"}, + "virtual_machine_image": {"key": "virtualMachineImage", "type": "VirtualMachineImage"}, + "isolated_network": {"key": "isolatedNetwork", "type": "bool"}, + "scale_settings": {"key": "scaleSettings", "type": "ScaleSettings"}, + "user_account_credentials": {"key": "userAccountCredentials", "type": "UserAccountCredentials"}, + "subnet": {"key": "subnet", "type": "ResourceId"}, + "remote_login_port_public_access": {"key": "remoteLoginPortPublicAccess", "type": "str"}, + "allocation_state": {"key": "allocationState", "type": "str"}, + "allocation_state_transition_time": {"key": "allocationStateTransitionTime", "type": "iso-8601"}, + "errors": {"key": "errors", "type": "[ErrorResponse]"}, + "current_node_count": {"key": "currentNodeCount", "type": "int"}, + "target_node_count": {"key": "targetNodeCount", "type": "int"}, + "node_state_counts": {"key": "nodeStateCounts", "type": "NodeStateCounts"}, + "enable_node_public_ip": {"key": "enableNodePublicIp", "type": "bool"}, + "property_bag": {"key": "propertyBag", "type": "object"}, } def __init__( self, *, - os_type: Optional[Union[str, "OsType"]] = "Linux", + os_type: Union[str, "_models.OsType"] = "Linux", vm_size: Optional[str] = None, - vm_priority: Optional[Union[str, "VmPriority"]] = None, - virtual_machine_image: Optional["VirtualMachineImage"] = None, + vm_priority: Optional[Union[str, "_models.VmPriority"]] = None, + virtual_machine_image: Optional["_models.VirtualMachineImage"] = None, isolated_network: Optional[bool] = None, - scale_settings: Optional["ScaleSettings"] = None, - user_account_credentials: 
Optional["UserAccountCredentials"] = None, - subnet: Optional["ResourceId"] = None, - remote_login_port_public_access: Optional[Union[str, "RemoteLoginPortPublicAccess"]] = "NotSpecified", - enable_node_public_ip: Optional[bool] = True, - property_bag: Optional[Any] = None, - **kwargs - ): - """ - :keyword os_type: Compute OS Type. Possible values include: "Linux", "Windows". Default value: - "Linux". + scale_settings: Optional["_models.ScaleSettings"] = None, + user_account_credentials: Optional["_models.UserAccountCredentials"] = None, + subnet: Optional["_models.ResourceId"] = None, + remote_login_port_public_access: Union[str, "_models.RemoteLoginPortPublicAccess"] = "NotSpecified", + enable_node_public_ip: bool = True, + property_bag: Optional[JSON] = None, + **kwargs: Any + ) -> None: + """ + :keyword os_type: Compute OS Type. Known values are: "Linux" and "Windows". :paramtype os_type: str or ~azure.mgmt.machinelearningservices.models.OsType :keyword vm_size: Virtual Machine Size. :paramtype vm_size: str - :keyword vm_priority: Virtual Machine priority. Possible values include: "Dedicated", + :keyword vm_priority: Virtual Machine priority. Known values are: "Dedicated" and "LowPriority". :paramtype vm_priority: str or ~azure.mgmt.machinelearningservices.models.VmPriority :keyword virtual_machine_image: Virtual Machine image for AML Compute - windows only. @@ -1320,8 +1379,8 @@ def __init__( Indicates that the public ssh port is open on all nodes of the cluster. NotSpecified - Indicates that the public ssh port is closed on all nodes of the cluster if VNet is defined, else is open all public nodes. It can be default only during cluster creation time, after - creation it will be either enabled or disabled. Possible values include: "Enabled", "Disabled", - "NotSpecified". Default value: "NotSpecified". + creation it will be either enabled or disabled. Known values are: "Enabled", "Disabled", and + "NotSpecified". :paramtype remote_login_port_public_access: str or ~azure.mgmt.machinelearningservices.models.RemoteLoginPortPublicAccess :keyword enable_node_public_ip: Enable or disable node public IP address provisioning. Possible @@ -1330,9 +1389,9 @@ def __init__( public IPs. :paramtype enable_node_public_ip: bool :keyword property_bag: A property bag containing additional properties. - :paramtype property_bag: any + :paramtype property_bag: JSON """ - super(AmlComputeProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.os_type = os_type self.vm_size = vm_size self.vm_priority = vm_priority @@ -1352,113 +1411,36 @@ def __init__( self.property_bag = property_bag -class AmlOperation(msrest.serialization.Model): - """Azure Machine Learning team account REST API operation. - - :ivar display: Gets or sets display name of operation. - :vartype display: ~azure.mgmt.machinelearningservices.models.OperationDisplay - :ivar is_data_action: Indicates whether the operation applies to data-plane. - :vartype is_data_action: bool - :ivar name: Gets or sets operation name: {provider}/{resource}/{operation}. - :vartype name: str - :ivar origin: The intended executor of the operation: user/system. 
- :vartype origin: str - """ - - _attribute_map = { - 'display': {'key': 'display', 'type': 'OperationDisplay'}, - 'is_data_action': {'key': 'isDataAction', 'type': 'bool'}, - 'name': {'key': 'name', 'type': 'str'}, - 'origin': {'key': 'origin', 'type': 'str'}, - } - - def __init__( - self, - *, - display: Optional["OperationDisplay"] = None, - is_data_action: Optional[bool] = None, - name: Optional[str] = None, - origin: Optional[str] = None, - **kwargs - ): - """ - :keyword display: Gets or sets display name of operation. - :paramtype display: ~azure.mgmt.machinelearningservices.models.OperationDisplay - :keyword is_data_action: Indicates whether the operation applies to data-plane. - :paramtype is_data_action: bool - :keyword name: Gets or sets operation name: {provider}/{resource}/{operation}. - :paramtype name: str - :keyword origin: The intended executor of the operation: user/system. - :paramtype origin: str - """ - super(AmlOperation, self).__init__(**kwargs) - self.display = display - self.is_data_action = is_data_action - self.name = name - self.origin = origin - - -class AmlOperationListResult(msrest.serialization.Model): - """An array of operations supported by the resource provider. - - :ivar value: Gets or sets list of AML team account operations supported by the - AML team account resource provider. - :vartype value: list[~azure.mgmt.machinelearningservices.models.AmlOperation] - """ - - _attribute_map = { - 'value': {'key': 'value', 'type': '[AmlOperation]'}, - } - - def __init__( - self, - *, - value: Optional[List["AmlOperation"]] = None, - **kwargs - ): - """ - :keyword value: Gets or sets list of AML team account operations supported by the - AML team account resource provider. - :paramtype value: list[~azure.mgmt.machinelearningservices.models.AmlOperation] - """ - super(AmlOperationListResult, self).__init__(**kwargs) - self.value = value - - -class IdentityConfiguration(msrest.serialization.Model): +class IdentityConfiguration(_serialization.Model): """Base definition for identity configuration. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmlToken, ManagedIdentity, UserIdentity. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AmlToken, ManagedIdentity, UserIdentity All required parameters must be populated in order to send to Azure. - :ivar identity_type: Required. [Required] Specifies the type of identity framework.Constant - filled by server. Possible values include: "Managed", "AMLToken", "UserIdentity". + :ivar identity_type: [Required] Specifies the type of identity framework. Required. Known + values are: "Managed", "AMLToken", and "UserIdentity". 
:vartype identity_type: str or ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType """ _validation = { - 'identity_type': {'required': True}, + "identity_type": {"required": True}, } _attribute_map = { - 'identity_type': {'key': 'identityType', 'type': 'str'}, + "identity_type": {"key": "identityType", "type": "str"}, } _subtype_map = { - 'identity_type': {'AMLToken': 'AmlToken', 'Managed': 'ManagedIdentity', 'UserIdentity': 'UserIdentity'} + "identity_type": {"AMLToken": "AmlToken", "Managed": "ManagedIdentity", "UserIdentity": "UserIdentity"} } - def __init__( - self, - **kwargs - ): - """ - """ - super(IdentityConfiguration, self).__init__(**kwargs) - self.identity_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.identity_type: Optional[str] = None class AmlToken(IdentityConfiguration): @@ -1466,64 +1448,56 @@ class AmlToken(IdentityConfiguration): All required parameters must be populated in order to send to Azure. - :ivar identity_type: Required. [Required] Specifies the type of identity framework.Constant - filled by server. Possible values include: "Managed", "AMLToken", "UserIdentity". + :ivar identity_type: [Required] Specifies the type of identity framework. Required. Known + values are: "Managed", "AMLToken", and "UserIdentity". :vartype identity_type: str or ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType """ _validation = { - 'identity_type': {'required': True}, + "identity_type": {"required": True}, } _attribute_map = { - 'identity_type': {'key': 'identityType', 'type': 'str'}, + "identity_type": {"key": "identityType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(AmlToken, self).__init__(**kwargs) - self.identity_type = 'AMLToken' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.identity_type: str = "AMLToken" -class MonitorComputeIdentityBase(msrest.serialization.Model): +class MonitorComputeIdentityBase(_serialization.Model): """Monitor compute identity base definition. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AmlTokenComputeIdentity, ManagedComputeIdentity. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AmlTokenComputeIdentity, ManagedComputeIdentity All required parameters must be populated in order to send to Azure. - :ivar compute_identity_type: Required. [Required] Monitor compute identity type enum.Constant - filled by server. Possible values include: "AmlToken", "ManagedIdentity". + :ivar compute_identity_type: [Required] Monitor compute identity type enum. Required. Known + values are: "AmlToken" and "ManagedIdentity". 
:vartype compute_identity_type: str or ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityType """ _validation = { - 'compute_identity_type': {'required': True}, + "compute_identity_type": {"required": True}, } _attribute_map = { - 'compute_identity_type': {'key': 'computeIdentityType', 'type': 'str'}, + "compute_identity_type": {"key": "computeIdentityType", "type": "str"}, } _subtype_map = { - 'compute_identity_type': {'AmlToken': 'AmlTokenComputeIdentity', 'ManagedIdentity': 'ManagedComputeIdentity'} + "compute_identity_type": {"AmlToken": "AmlTokenComputeIdentity", "ManagedIdentity": "ManagedComputeIdentity"} } - def __init__( - self, - **kwargs - ): - """ - """ - super(MonitorComputeIdentityBase, self).__init__(**kwargs) - self.compute_identity_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.compute_identity_type: Optional[str] = None class AmlTokenComputeIdentity(MonitorComputeIdentityBase): @@ -1531,31 +1505,27 @@ class AmlTokenComputeIdentity(MonitorComputeIdentityBase): All required parameters must be populated in order to send to Azure. - :ivar compute_identity_type: Required. [Required] Monitor compute identity type enum.Constant - filled by server. Possible values include: "AmlToken", "ManagedIdentity". + :ivar compute_identity_type: [Required] Monitor compute identity type enum. Required. Known + values are: "AmlToken" and "ManagedIdentity". :vartype compute_identity_type: str or ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityType """ _validation = { - 'compute_identity_type': {'required': True}, + "compute_identity_type": {"required": True}, } _attribute_map = { - 'compute_identity_type': {'key': 'computeIdentityType', 'type': 'str'}, + "compute_identity_type": {"key": "computeIdentityType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(AmlTokenComputeIdentity, self).__init__(**kwargs) - self.compute_identity_type = 'AmlToken' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.compute_identity_type: str = "AmlToken" -class AmlUserFeature(msrest.serialization.Model): +class AmlUserFeature(_serialization.Model): """Features enabled for a workspace. :ivar id: Specifies the feature ID. @@ -1567,19 +1537,19 @@ class AmlUserFeature(msrest.serialization.Model): """ _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "description": {"key": "description", "type": "str"}, } def __init__( self, *, - id: Optional[str] = None, + id: Optional[str] = None, # pylint: disable=redefined-builtin display_name: Optional[str] = None, description: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword id: Specifies the feature ID. :paramtype id: str @@ -1588,7 +1558,7 @@ def __init__( :keyword description: Describes the feature for user experience. 
:paramtype description: str """ - super(AmlUserFeature, self).__init__(**kwargs) + super().__init__(**kwargs) self.id = id self.display_name = display_name self.description = description @@ -1596,41 +1566,51 @@ def __init__( class ApiKeyAuthWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """This connection type covers the generic ApiKey auth connection categories, for examples: -AzureOpenAI: - Category:= AzureOpenAI - AuthType:= ApiKey (as type discriminator) - Credentials:= {ApiKey} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey - Target:= {ApiBase} - -CognitiveService: - Category:= CognitiveService - AuthType:= ApiKey (as type discriminator) - Credentials:= {SubscriptionKey} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey - Target:= ServiceRegion={serviceRegion} + AzureOpenAI: + Category:= AzureOpenAI + AuthType:= ApiKey (as type discriminator) + Credentials:= {ApiKey} as + Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey + Target:= {ApiBase} + + CognitiveService: + Category:= CognitiveService + AuthType:= ApiKey (as type discriminator) + Credentials:= {SubscriptionKey} as + Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey + Target:= ServiceRegion={serviceRegion} + + CognitiveSearch: + Category:= CognitiveSearch + AuthType:= ApiKey (as type discriminator) + Credentials:= {Key} as + Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey + Target:= {Endpoint} + + Use Metadata property bag for ApiType, ApiVersion, Kind and other metadata fields. -CognitiveSearch: - Category:= CognitiveSearch - AuthType:= ApiKey (as type discriminator) - Credentials:= {Key} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey - Target:= {Endpoint} - -Use Metadata property bag for ApiType, ApiVersion, Kind and other metadata fields. + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", + :ivar category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar created_by_workspace_arm_id: The arm id of the workspace which created this connection. + :vartype created_by_workspace_arm_id: str :ivar expiry_time: :vartype expiry_time: ~datetime.datetime + :ivar is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :vartype is_shared_to_all: bool :ivar metadata: Any object. 
- :vartype metadata: any + :vartype metadata: JSON :ivar target: :vartype target: str :ivar credentials: Api key object for workspace connection credential. @@ -1638,49 +1618,63 @@ class ApiKeyAuthWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """ _validation = { - 'auth_type': {'required': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionApiKey'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionApiKey"}, } def __init__( self, *, - category: Optional[Union[str, "ConnectionCategory"]] = None, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, expiry_time: Optional[datetime.datetime] = None, - metadata: Optional[Any] = None, + is_shared_to_all: Optional[bool] = None, + metadata: Optional[JSON] = None, target: Optional[str] = None, - credentials: Optional["WorkspaceConnectionApiKey"] = None, - **kwargs - ): + credentials: Optional["_models.WorkspaceConnectionApiKey"] = None, + **kwargs: Any + ) -> None: """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", + :keyword category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory :keyword expiry_time: :paramtype expiry_time: ~datetime.datetime + :keyword is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :paramtype is_shared_to_all: bool :keyword metadata: Any object. - :paramtype metadata: any + :paramtype metadata: JSON :keyword target: :paramtype target: str :keyword credentials: Api key object for workspace connection credential. :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionApiKey """ - super(ApiKeyAuthWorkspaceConnectionProperties, self).__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) - self.auth_type = 'ApiKey' # type: str + super().__init__( + category=category, + expiry_time=expiry_time, + is_shared_to_all=is_shared_to_all, + metadata=metadata, + target=target, + **kwargs + ) + self.auth_type: str = "ApiKey" self.credentials = credentials -class ArmResourceId(msrest.serialization.Model): +class ArmResourceId(_serialization.Model): """ARM ResourceId of a resource. 
:ivar resource_id: Arm ResourceId is in the format @@ -1691,15 +1685,10 @@ class ArmResourceId(msrest.serialization.Model): """ _attribute_map = { - 'resource_id': {'key': 'resourceId', 'type': 'str'}, + "resource_id": {"key": "resourceId", "type": "str"}, } - def __init__( - self, - *, - resource_id: Optional[str] = None, - **kwargs - ): + def __init__(self, *, resource_id: Optional[str] = None, **kwargs: Any) -> None: """ :keyword resource_id: Arm ResourceId is in the format "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.Storage/storageAccounts/{StorageAccountName}" @@ -1707,25 +1696,25 @@ def __init__( "/subscriptions/{SubscriptionId}/resourceGroups/{ResourceGroupName}/providers/Microsoft.ContainerRegistry/registries/{AcrName}". :paramtype resource_id: str """ - super(ArmResourceId, self).__init__(**kwargs) + super().__init__(**kwargs) self.resource_id = resource_id -class ResourceBase(msrest.serialization.Model): +class ResourceBase(_serialization.Model): """ResourceBase. :ivar description: The asset description text. :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] """ _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, } def __init__( @@ -1734,17 +1723,17 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] """ - super(ResourceBase, self).__init__(**kwargs) + super().__init__(**kwargs) self.description = description self.properties = properties self.tags = tags @@ -1757,7 +1746,7 @@ class AssetBase(ResourceBase): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. 
:vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -1770,12 +1759,12 @@ class AssetBase(ResourceBase): """ _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, } def __init__( @@ -1784,17 +1773,17 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - is_anonymous: Optional[bool] = False, - is_archived: Optional[bool] = False, - **kwargs - ): + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -1805,7 +1794,7 @@ def __init__( provided it will be used to populate IsArchived. :paramtype is_archived: bool """ - super(AssetBase, self).__init__(description=description, properties=properties, tags=tags, **kwargs) + super().__init__(description=description, properties=properties, tags=tags, **kwargs) self.auto_delete_setting = auto_delete_setting self.is_anonymous = is_anonymous self.is_archived = is_archived @@ -1820,7 +1809,7 @@ class AssetContainer(ResourceBase): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar is_archived: Is the asset archived?. 
:vartype is_archived: bool @@ -1831,17 +1820,17 @@ class AssetContainer(ResourceBase): """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, } def __init__( @@ -1850,66 +1839,62 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - is_archived: Optional[bool] = False, - **kwargs - ): + is_archived: bool = False, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword is_archived: Is the asset archived?. :paramtype is_archived: bool """ - super(AssetContainer, self).__init__(description=description, properties=properties, tags=tags, **kwargs) + super().__init__(description=description, properties=properties, tags=tags, **kwargs) self.is_archived = is_archived self.latest_version = None self.next_version = None -class AssetJobInput(msrest.serialization.Model): +class AssetJobInput(_serialization.Model): """Asset input type. All required parameters must be populated in order to send to Azure. - :ivar mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: Required. [Required] Input Asset URI. + :ivar uri: [Required] Input Asset URI. Required. :vartype uri: str """ _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( - self, - *, - uri: str, - mode: Optional[Union[str, "InputDeliveryMode"]] = None, - **kwargs - ): + self, *, uri: str, mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, **kwargs: Any + ) -> None: """ - :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". 
:paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: Required. [Required] Input Asset URI. + :keyword uri: [Required] Input Asset URI. Required. :paramtype uri: str """ - super(AssetJobInput, self).__init__(**kwargs) + super().__init__(**kwargs) self.mode = mode self.uri = uri -class AssetJobOutput(msrest.serialization.Model): +class AssetJobOutput(_serialization.Model): """Asset output type. :ivar asset_name: Output Asset Name. @@ -1918,7 +1903,7 @@ class AssetJobOutput(msrest.serialization.Model): :vartype asset_version: str :ivar auto_delete_setting: Auto delete setting of output data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and "Direct". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :ivar uri: Output Asset URI. @@ -1926,11 +1911,11 @@ class AssetJobOutput(msrest.serialization.Model): """ _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( @@ -1938,11 +1923,11 @@ def __init__( *, asset_name: Optional[str] = None, asset_version: Optional[str] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - mode: Optional[Union[str, "OutputDeliveryMode"]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, uri: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword asset_name: Output Asset Name. :paramtype asset_name: str @@ -1950,13 +1935,13 @@ def __init__( :paramtype asset_version: str :keyword auto_delete_setting: Auto delete setting of output data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and "Direct". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :keyword uri: Output Asset URI. :paramtype uri: str """ - super(AssetJobOutput, self).__init__(**kwargs) + super().__init__(**kwargs) self.asset_name = asset_name self.asset_version = asset_version self.auto_delete_setting = auto_delete_setting @@ -1964,147 +1949,135 @@ def __init__( self.uri = uri -class AssetReferenceBase(msrest.serialization.Model): +class AssetReferenceBase(_serialization.Model): """Base definition for asset references. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DataPathAssetReference, IdAssetReference, OutputPathAssetReference. + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + DataPathAssetReference, IdAssetReference, OutputPathAssetReference All required parameters must be populated in order to send to Azure. - :ivar reference_type: Required. [Required] Specifies the type of asset reference.Constant - filled by server. Possible values include: "Id", "DataPath", "OutputPath". + :ivar reference_type: [Required] Specifies the type of asset reference. Required. Known values + are: "Id", "DataPath", and "OutputPath". :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType """ _validation = { - 'reference_type': {'required': True}, + "reference_type": {"required": True}, } _attribute_map = { - 'reference_type': {'key': 'referenceType', 'type': 'str'}, + "reference_type": {"key": "referenceType", "type": "str"}, } _subtype_map = { - 'reference_type': {'DataPath': 'DataPathAssetReference', 'Id': 'IdAssetReference', 'OutputPath': 'OutputPathAssetReference'} + "reference_type": { + "DataPath": "DataPathAssetReference", + "Id": "IdAssetReference", + "OutputPath": "OutputPathAssetReference", + } } - def __init__( - self, - **kwargs - ): - """ - """ - super(AssetReferenceBase, self).__init__(**kwargs) - self.reference_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.reference_type: Optional[str] = None -class AssignedUser(msrest.serialization.Model): +class AssignedUser(_serialization.Model): """A user that can be assigned to a compute instance. All required parameters must be populated in order to send to Azure. - :ivar object_id: Required. User’s AAD Object Id. + :ivar object_id: User’s AAD Object Id. Required. :vartype object_id: str - :ivar tenant_id: Required. User’s AAD Tenant Id. + :ivar tenant_id: User’s AAD Tenant Id. Required. :vartype tenant_id: str """ _validation = { - 'object_id': {'required': True}, - 'tenant_id': {'required': True}, + "object_id": {"required": True}, + "tenant_id": {"required": True}, } _attribute_map = { - 'object_id': {'key': 'objectId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + "object_id": {"key": "objectId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, } - def __init__( - self, - *, - object_id: str, - tenant_id: str, - **kwargs - ): + def __init__(self, *, object_id: str, tenant_id: str, **kwargs: Any) -> None: """ - :keyword object_id: Required. User’s AAD Object Id. + :keyword object_id: User’s AAD Object Id. Required. :paramtype object_id: str - :keyword tenant_id: Required. User’s AAD Tenant Id. + :keyword tenant_id: User’s AAD Tenant Id. Required. :paramtype tenant_id: str """ - super(AssignedUser, self).__init__(**kwargs) + super().__init__(**kwargs) self.object_id = object_id self.tenant_id = tenant_id -class AutoDeleteSetting(msrest.serialization.Model): +class AutoDeleteSetting(_serialization.Model): """AutoDeleteSetting. - :ivar condition: When to check if an asset is expired. Possible values include: - "CreatedGreaterThan", "LastAccessedGreaterThan". + :ivar condition: When to check if an asset is expired. Known values are: "CreatedGreaterThan" + and "LastAccessedGreaterThan". :vartype condition: str or ~azure.mgmt.machinelearningservices.models.AutoDeleteCondition :ivar value: Expiration condition value. 
:vartype value: str """ _attribute_map = { - 'condition': {'key': 'condition', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + "condition": {"key": "condition", "type": "str"}, + "value": {"key": "value", "type": "str"}, } def __init__( self, *, - condition: Optional[Union[str, "AutoDeleteCondition"]] = None, + condition: Optional[Union[str, "_models.AutoDeleteCondition"]] = None, value: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword condition: When to check if an asset is expired. Possible values include: - "CreatedGreaterThan", "LastAccessedGreaterThan". + :keyword condition: When to check if an asset is expired. Known values are: + "CreatedGreaterThan" and "LastAccessedGreaterThan". :paramtype condition: str or ~azure.mgmt.machinelearningservices.models.AutoDeleteCondition :keyword value: Expiration condition value. :paramtype value: str """ - super(AutoDeleteSetting, self).__init__(**kwargs) + super().__init__(**kwargs) self.condition = condition self.value = value -class ForecastHorizon(msrest.serialization.Model): +class ForecastHorizon(_serialization.Model): """The desired maximum forecast horizon in units of time-series frequency. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AutoForecastHorizon, CustomForecastHorizon. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AutoForecastHorizon, CustomForecastHorizon All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] Set forecast horizon value selection mode.Constant filled by - server. Possible values include: "Auto", "Custom". + :ivar mode: [Required] Set forecast horizon value selection mode. Required. Known values are: + "Auto" and "Custom". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.ForecastHorizonMode """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - _subtype_map = { - 'mode': {'Auto': 'AutoForecastHorizon', 'Custom': 'CustomForecastHorizon'} - } + _subtype_map = {"mode": {"Auto": "AutoForecastHorizon", "Custom": "CustomForecastHorizon"}} - def __init__( - self, - **kwargs - ): - """ - """ - super(ForecastHorizon, self).__init__(**kwargs) - self.mode = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.mode: Optional[str] = None class AutoForecastHorizon(ForecastHorizon): @@ -2112,69 +2085,60 @@ class AutoForecastHorizon(ForecastHorizon): All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] Set forecast horizon value selection mode.Constant filled by - server. Possible values include: "Auto", "Custom". + :ivar mode: [Required] Set forecast horizon value selection mode. Required. Known values are: + "Auto" and "Custom". 
:vartype mode: str or ~azure.mgmt.machinelearningservices.models.ForecastHorizonMode """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(AutoForecastHorizon, self).__init__(**kwargs) - self.mode = 'Auto' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.mode: str = "Auto" -class AutologgerSettings(msrest.serialization.Model): +class AutologgerSettings(_serialization.Model): """Settings for Autologger. All required parameters must be populated in order to send to Azure. - :ivar mlflow_autologger: Required. [Required] Indicates whether mlflow autologger is enabled. - Possible values include: "Enabled", "Disabled". + :ivar mlflow_autologger: [Required] Indicates whether mlflow autologger is enabled. Required. + Known values are: "Enabled" and "Disabled". :vartype mlflow_autologger: str or ~azure.mgmt.machinelearningservices.models.MLFlowAutologgerState """ _validation = { - 'mlflow_autologger': {'required': True}, + "mlflow_autologger": {"required": True}, } _attribute_map = { - 'mlflow_autologger': {'key': 'mlflowAutologger', 'type': 'str'}, + "mlflow_autologger": {"key": "mlflowAutologger", "type": "str"}, } - def __init__( - self, - *, - mlflow_autologger: Union[str, "MLFlowAutologgerState"], - **kwargs - ): + def __init__(self, *, mlflow_autologger: Union[str, "_models.MLFlowAutologgerState"], **kwargs: Any) -> None: """ - :keyword mlflow_autologger: Required. [Required] Indicates whether mlflow autologger is - enabled. Possible values include: "Enabled", "Disabled". + :keyword mlflow_autologger: [Required] Indicates whether mlflow autologger is enabled. + Required. Known values are: "Enabled" and "Disabled". :paramtype mlflow_autologger: str or ~azure.mgmt.machinelearningservices.models.MLFlowAutologgerState """ - super(AutologgerSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.mlflow_autologger = mlflow_autologger -class JobBaseProperties(ResourceBase): +class JobBaseProperties(ResourceBase): # pylint: disable=too-many-instance-attributes """Base definition for a job. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AutoMLJob, CommandJob, LabelingJobProperties, PipelineJob, SparkJob, SweepJob. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AutoMLJob, CommandJob, LabelingJobProperties, PipelineJob, SparkJob, SweepJob Variables are only populated by the server, and will be ignored when sending a request. @@ -2184,7 +2148,7 @@ class JobBaseProperties(ResourceBase): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar component_id: ARM resource ID of the component resource. :vartype component_id: str @@ -2201,8 +2165,8 @@ class JobBaseProperties(ResourceBase): :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration :ivar is_archived: Is the asset archived?. :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. 
- Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". + :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType :ivar notification_setting: Notification setting for the job. :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting @@ -2212,36 +2176,43 @@ class JobBaseProperties(ResourceBase): :ivar services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, + "job_type": {"required": True}, + "status": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, } _subtype_map = { - 'job_type': {'AutoML': 'AutoMLJob', 'Command': 'CommandJob', 'Labeling': 'LabelingJobProperties', 'Pipeline': 'PipelineJob', 'Spark': 'SparkJob', 'Sweep': 'SweepJob'} + "job_type": { + "AutoML": "AutoMLJob", + "Command": "CommandJob", + "Labeling": "LabelingJobProperties", + "Pipeline": "PipelineJob", + "Spark": "SparkJob", + 
"Sweep": "SweepJob", + } } def __init__( @@ -2253,20 +2224,20 @@ def __init__( component_id: Optional[str] = None, compute_id: Optional[str] = None, display_name: Optional[str] = None, - experiment_name: Optional[str] = "Default", - identity: Optional["IdentityConfiguration"] = None, - is_archived: Optional[bool] = False, - notification_setting: Optional["NotificationSetting"] = None, - secrets_configuration: Optional[Dict[str, "SecretConfiguration"]] = None, - services: Optional[Dict[str, "JobService"]] = None, - **kwargs - ): + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + notification_setting: Optional["_models.NotificationSetting"] = None, + secrets_configuration: Optional[Dict[str, "_models.SecretConfiguration"]] = None, + services: Optional[Dict[str, "_models.JobService"]] = None, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword component_id: ARM resource ID of the component resource. :paramtype component_id: str @@ -2292,24 +2263,24 @@ def __init__( For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :paramtype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] """ - super(JobBaseProperties, self).__init__(description=description, properties=properties, tags=tags, **kwargs) + super().__init__(description=description, properties=properties, tags=tags, **kwargs) self.component_id = component_id self.compute_id = compute_id self.display_name = display_name self.experiment_name = experiment_name self.identity = identity self.is_archived = is_archived - self.job_type = 'JobBaseProperties' # type: str + self.job_type: Optional[str] = None self.notification_setting = notification_setting self.secrets_configuration = secrets_configuration self.services = services self.status = None -class AutoMLJob(JobBaseProperties): +class AutoMLJob(JobBaseProperties): # pylint: disable=too-many-instance-attributes """AutoMLJob class. -Use this class for executing AutoML tasks like Classification/Regression etc. -See TaskType enum for all the tasks supported. + Use this class for executing AutoML tasks like Classification/Regression etc. + See TaskType enum for all the tasks supported. Variables are only populated by the server, and will be ignored when sending a request. @@ -2319,7 +2290,7 @@ class AutoMLJob(JobBaseProperties): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar component_id: ARM resource ID of the component resource. :vartype component_id: str @@ -2336,8 +2307,8 @@ class AutoMLJob(JobBaseProperties): :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration :ivar is_archived: Is the asset archived?. :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". 
+ :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType :ivar notification_setting: Notification setting for the job. :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting @@ -2347,9 +2318,9 @@ class AutoMLJob(JobBaseProperties): :ivar services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus :ivar environment_id: The ARM resource ID of the Environment specification for the job. This is optional value to provide, if not provided, AutoML will default this to Production @@ -2363,69 +2334,69 @@ class AutoMLJob(JobBaseProperties): :vartype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings :ivar resources: Compute Resource configuration for the job. :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration - :ivar task_details: Required. [Required] This represents scenario which can be one of - Tables/NLP/Image. + :ivar task_details: [Required] This represents scenario which can be one of Tables/NLP/Image. + Required. 
:vartype task_details: ~azure.mgmt.machinelearningservices.models.AutoMLVertical """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'task_details': {'required': True}, + "job_type": {"required": True}, + "status": {"readonly": True}, + "task_details": {"required": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'queue_settings': {'key': 'queueSettings', 'type': 'QueueSettings'}, - 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, - 'task_details': {'key': 'taskDetails', 'type': 'AutoMLVertical'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "queue_settings": {"key": "queueSettings", "type": "QueueSettings"}, + "resources": {"key": "resources", "type": "JobResourceConfiguration"}, + "task_details": {"key": "taskDetails", "type": "AutoMLVertical"}, } def __init__( self, *, - task_details: "AutoMLVertical", + task_details: "_models.AutoMLVertical", description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, component_id: Optional[str] = None, compute_id: Optional[str] = None, display_name: Optional[str] = None, - experiment_name: Optional[str] = "Default", - identity: Optional["IdentityConfiguration"] = None, - is_archived: Optional[bool] = False, - notification_setting: Optional["NotificationSetting"] = None, - secrets_configuration: Optional[Dict[str, "SecretConfiguration"]] = None, - services: Optional[Dict[str, "JobService"]] = None, + experiment_name: str = "Default", + identity: 
Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + notification_setting: Optional["_models.NotificationSetting"] = None, + secrets_configuration: Optional[Dict[str, "_models.SecretConfiguration"]] = None, + services: Optional[Dict[str, "_models.JobService"]] = None, environment_id: Optional[str] = None, environment_variables: Optional[Dict[str, str]] = None, - outputs: Optional[Dict[str, "JobOutput"]] = None, - queue_settings: Optional["QueueSettings"] = None, - resources: Optional["JobResourceConfiguration"] = None, - **kwargs - ): + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + queue_settings: Optional["_models.QueueSettings"] = None, + resources: Optional["_models.JobResourceConfiguration"] = None, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword component_id: ARM resource ID of the component resource. :paramtype component_id: str @@ -2462,12 +2433,26 @@ def __init__( :paramtype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings :keyword resources: Compute Resource configuration for the job. :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration - :keyword task_details: Required. [Required] This represents scenario which can be one of - Tables/NLP/Image. + :keyword task_details: [Required] This represents scenario which can be one of + Tables/NLP/Image. Required. :paramtype task_details: ~azure.mgmt.machinelearningservices.models.AutoMLVertical """ - super(AutoMLJob, self).__init__(description=description, properties=properties, tags=tags, component_id=component_id, compute_id=compute_id, display_name=display_name, experiment_name=experiment_name, identity=identity, is_archived=is_archived, notification_setting=notification_setting, secrets_configuration=secrets_configuration, services=services, **kwargs) - self.job_type = 'AutoML' # type: str + super().__init__( + description=description, + properties=properties, + tags=tags, + component_id=component_id, + compute_id=compute_id, + display_name=display_name, + experiment_name=experiment_name, + identity=identity, + is_archived=is_archived, + notification_setting=notification_setting, + secrets_configuration=secrets_configuration, + services=services, + **kwargs + ) + self.job_type: str = "AutoML" self.environment_id = environment_id self.environment_variables = environment_variables self.outputs = outputs @@ -2476,104 +2461,111 @@ def __init__( self.task_details = task_details -class AutoMLVertical(msrest.serialization.Model): +class AutoMLVertical(_serialization.Model): """AutoML vertical class. -Base class for AutoML verticals - TableVertical/ImageVertical/NLPVertical. + Base class for AutoML verticals - TableVertical/ImageVertical/NLPVertical. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: Classification, Forecasting, ImageClassification, ImageClassificationMultilabel, ImageInstanceSegmentation, ImageObjectDetection, Regression, TextClassification, TextClassificationMultilabel, TextNer. + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + Classification, Forecasting, ImageClassification, ImageClassificationMultilabel, + ImageInstanceSegmentation, ImageObjectDetection, Regression, TextClassification, + TextClassificationMultilabel, TextNer All required parameters must be populated in order to send to Azure. - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity :ivar target_column_name: Target column name: This is prediction values column. Also known as label column name in context of classification tasks. :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". + "TextClassification", "TextClassificationMultilabel", and "TextNER". :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. + :ivar training_data: [Required] Training data input. Required. :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, } _subtype_map = { - 'task_type': {'Classification': 'Classification', 'Forecasting': 'Forecasting', 'ImageClassification': 'ImageClassification', 'ImageClassificationMultilabel': 'ImageClassificationMultilabel', 'ImageInstanceSegmentation': 'ImageInstanceSegmentation', 'ImageObjectDetection': 'ImageObjectDetection', 'Regression': 'Regression', 'TextClassification': 'TextClassification', 'TextClassificationMultilabel': 'TextClassificationMultilabel', 'TextNER': 'TextNer'} + "task_type": { + "Classification": "Classification", + "Forecasting": "Forecasting", + "ImageClassification": "ImageClassification", + "ImageClassificationMultilabel": "ImageClassificationMultilabel", + "ImageInstanceSegmentation": "ImageInstanceSegmentation", + "ImageObjectDetection": "ImageObjectDetection", + "Regression": "Regression", + "TextClassification": "TextClassification", + "TextClassificationMultilabel": "TextClassificationMultilabel", + "TextNER": "TextNer", + } } def __init__( self, *, - training_data: "MLTableJobInput", - log_verbosity: Optional[Union[str, "LogVerbosity"]] = None, + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, 
"_models.LogVerbosity"]] = None, target_column_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity :keyword target_column_name: Target column name: This is prediction values column. Also known as label column name in context of classification tasks. :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. + :keyword training_data: [Required] Training data input. Required. :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ - super(AutoMLVertical, self).__init__(**kwargs) + super().__init__(**kwargs) self.log_verbosity = log_verbosity self.target_column_name = target_column_name - self.task_type = None # type: Optional[str] + self.task_type: Optional[str] = None self.training_data = training_data -class NCrossValidations(msrest.serialization.Model): +class NCrossValidations(_serialization.Model): """N-Cross validations value. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AutoNCrossValidations, CustomNCrossValidations. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AutoNCrossValidations, CustomNCrossValidations All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] Mode for determining N-Cross validations.Constant filled by - server. Possible values include: "Auto", "Custom". + :ivar mode: [Required] Mode for determining N-Cross validations. Required. Known values are: + "Auto" and "Custom". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.NCrossValidationsMode """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - _subtype_map = { - 'mode': {'Auto': 'AutoNCrossValidations', 'Custom': 'CustomNCrossValidations'} - } + _subtype_map = {"mode": {"Auto": "AutoNCrossValidations", "Custom": "CustomNCrossValidations"}} - def __init__( - self, - **kwargs - ): - """ - """ - super(NCrossValidations, self).__init__(**kwargs) - self.mode = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.mode: Optional[str] = None class AutoNCrossValidations(NCrossValidations): @@ -2581,30 +2573,26 @@ class AutoNCrossValidations(NCrossValidations): All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] Mode for determining N-Cross validations.Constant filled by - server. Possible values include: "Auto", "Custom". + :ivar mode: [Required] Mode for determining N-Cross validations. Required. Known values are: + "Auto" and "Custom". 
:vartype mode: str or ~azure.mgmt.machinelearningservices.models.NCrossValidationsMode """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(AutoNCrossValidations, self).__init__(**kwargs) - self.mode = 'Auto' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.mode: str = "Auto" -class AutoPauseProperties(msrest.serialization.Model): +class AutoPauseProperties(_serialization.Model): """Auto pause properties. :ivar delay_in_minutes: @@ -2614,29 +2602,25 @@ class AutoPauseProperties(msrest.serialization.Model): """ _attribute_map = { - 'delay_in_minutes': {'key': 'delayInMinutes', 'type': 'int'}, - 'enabled': {'key': 'enabled', 'type': 'bool'}, + "delay_in_minutes": {"key": "delayInMinutes", "type": "int"}, + "enabled": {"key": "enabled", "type": "bool"}, } def __init__( - self, - *, - delay_in_minutes: Optional[int] = None, - enabled: Optional[bool] = None, - **kwargs - ): + self, *, delay_in_minutes: Optional[int] = None, enabled: Optional[bool] = None, **kwargs: Any + ) -> None: """ :keyword delay_in_minutes: :paramtype delay_in_minutes: int :keyword enabled: :paramtype enabled: bool """ - super(AutoPauseProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.delay_in_minutes = delay_in_minutes self.enabled = enabled -class AutoScaleProperties(msrest.serialization.Model): +class AutoScaleProperties(_serialization.Model): """Auto scale properties. :ivar min_node_count: @@ -2648,9 +2632,9 @@ class AutoScaleProperties(msrest.serialization.Model): """ _attribute_map = { - 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, - 'enabled': {'key': 'enabled', 'type': 'bool'}, - 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, + "min_node_count": {"key": "minNodeCount", "type": "int"}, + "enabled": {"key": "enabled", "type": "bool"}, + "max_node_count": {"key": "maxNodeCount", "type": "int"}, } def __init__( @@ -2659,8 +2643,8 @@ def __init__( min_node_count: Optional[int] = None, enabled: Optional[bool] = None, max_node_count: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword min_node_count: :paramtype min_node_count: int @@ -2669,45 +2653,38 @@ def __init__( :keyword max_node_count: :paramtype max_node_count: int """ - super(AutoScaleProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.min_node_count = min_node_count self.enabled = enabled self.max_node_count = max_node_count -class Seasonality(msrest.serialization.Model): +class Seasonality(_serialization.Model): """Forecasting seasonality. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AutoSeasonality, CustomSeasonality. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AutoSeasonality, CustomSeasonality All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] Seasonality mode.Constant filled by server. Possible values - include: "Auto", "Custom". + :ivar mode: [Required] Seasonality mode. Required. Known values are: "Auto" and "Custom". 
:vartype mode: str or ~azure.mgmt.machinelearningservices.models.SeasonalityMode """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - _subtype_map = { - 'mode': {'Auto': 'AutoSeasonality', 'Custom': 'CustomSeasonality'} - } + _subtype_map = {"mode": {"Auto": "AutoSeasonality", "Custom": "CustomSeasonality"}} - def __init__( - self, - **kwargs - ): - """ - """ - super(Seasonality, self).__init__(**kwargs) - self.mode = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.mode: Optional[str] = None class AutoSeasonality(Seasonality): @@ -2715,62 +2692,51 @@ class AutoSeasonality(Seasonality): All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] Seasonality mode.Constant filled by server. Possible values - include: "Auto", "Custom". + :ivar mode: [Required] Seasonality mode. Required. Known values are: "Auto" and "Custom". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.SeasonalityMode """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(AutoSeasonality, self).__init__(**kwargs) - self.mode = 'Auto' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.mode: str = "Auto" -class TargetLags(msrest.serialization.Model): +class TargetLags(_serialization.Model): """The number of past periods to lag from the target column. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AutoTargetLags, CustomTargetLags. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AutoTargetLags, CustomTargetLags All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] Set target lags mode - Auto/Custom.Constant filled by server. - Possible values include: "Auto", "Custom". + :ivar mode: [Required] Set target lags mode - Auto/Custom. Required. Known values are: "Auto" + and "Custom". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetLagsMode """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - _subtype_map = { - 'mode': {'Auto': 'AutoTargetLags', 'Custom': 'CustomTargetLags'} - } + _subtype_map = {"mode": {"Auto": "AutoTargetLags", "Custom": "CustomTargetLags"}} - def __init__( - self, - **kwargs - ): - """ - """ - super(TargetLags, self).__init__(**kwargs) - self.mode = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.mode: Optional[str] = None class AutoTargetLags(TargetLags): @@ -2778,62 +2744,52 @@ class AutoTargetLags(TargetLags): All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] Set target lags mode - Auto/Custom.Constant filled by server. - Possible values include: "Auto", "Custom". + :ivar mode: [Required] Set target lags mode - Auto/Custom. Required. Known values are: "Auto" + and "Custom". 
:vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetLagsMode """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(AutoTargetLags, self).__init__(**kwargs) - self.mode = 'Auto' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.mode: str = "Auto" -class TargetRollingWindowSize(msrest.serialization.Model): +class TargetRollingWindowSize(_serialization.Model): """Forecasting target rolling window size. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AutoTargetRollingWindowSize, CustomTargetRollingWindowSize. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AutoTargetRollingWindowSize, CustomTargetRollingWindowSize All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] TargetRollingWindowSiz detection mode.Constant filled by - server. Possible values include: "Auto", "Custom". + :ivar mode: [Required] TargetRollingWindowSiz detection mode. Required. Known values are: + "Auto" and "Custom". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSizeMode """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - _subtype_map = { - 'mode': {'Auto': 'AutoTargetRollingWindowSize', 'Custom': 'CustomTargetRollingWindowSize'} - } + _subtype_map = {"mode": {"Auto": "AutoTargetRollingWindowSize", "Custom": "CustomTargetRollingWindowSize"}} - def __init__( - self, - **kwargs - ): - """ - """ - super(TargetRollingWindowSize, self).__init__(**kwargs) - self.mode = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.mode: Optional[str] = None class AutoTargetRollingWindowSize(TargetRollingWindowSize): @@ -2841,63 +2797,58 @@ class AutoTargetRollingWindowSize(TargetRollingWindowSize): All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] TargetRollingWindowSiz detection mode.Constant filled by - server. Possible values include: "Auto", "Custom". + :ivar mode: [Required] TargetRollingWindowSiz detection mode. Required. Known values are: + "Auto" and "Custom". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSizeMode """ _validation = { - 'mode': {'required': True}, + "mode": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(AutoTargetRollingWindowSize, self).__init__(**kwargs) - self.mode = 'Auto' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.mode: str = "Auto" -class MonitoringAlertNotificationSettingsBase(msrest.serialization.Model): +class MonitoringAlertNotificationSettingsBase(_serialization.Model): """MonitoringAlertNotificationSettingsBase. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzMonMonitoringAlertNotificationSettings, EmailMonitoringAlertNotificationSettings. + You probably want to use the sub-classes and not this class directly. 
Known sub-classes are: + AzMonMonitoringAlertNotificationSettings, EmailMonitoringAlertNotificationSettings All required parameters must be populated in order to send to Azure. - :ivar alert_notification_type: Required. [Required] Specifies the type of signal to - monitor.Constant filled by server. Possible values include: "AzureMonitor", "Email". + :ivar alert_notification_type: [Required] Specifies the type of signal to monitor. Required. + Known values are: "AzureMonitor" and "Email". :vartype alert_notification_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationType """ _validation = { - 'alert_notification_type': {'required': True}, + "alert_notification_type": {"required": True}, } _attribute_map = { - 'alert_notification_type': {'key': 'alertNotificationType', 'type': 'str'}, + "alert_notification_type": {"key": "alertNotificationType", "type": "str"}, } _subtype_map = { - 'alert_notification_type': {'AzureMonitor': 'AzMonMonitoringAlertNotificationSettings', 'Email': 'EmailMonitoringAlertNotificationSettings'} + "alert_notification_type": { + "AzureMonitor": "AzMonMonitoringAlertNotificationSettings", + "Email": "EmailMonitoringAlertNotificationSettings", + } } - def __init__( - self, - **kwargs - ): - """ - """ - super(MonitoringAlertNotificationSettingsBase, self).__init__(**kwargs) - self.alert_notification_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.alert_notification_type: Optional[str] = None class AzMonMonitoringAlertNotificationSettings(MonitoringAlertNotificationSettingsBase): @@ -2905,67 +2856,32 @@ class AzMonMonitoringAlertNotificationSettings(MonitoringAlertNotificationSettin All required parameters must be populated in order to send to Azure. - :ivar alert_notification_type: Required. [Required] Specifies the type of signal to - monitor.Constant filled by server. Possible values include: "AzureMonitor", "Email". + :ivar alert_notification_type: [Required] Specifies the type of signal to monitor. Required. + Known values are: "AzureMonitor" and "Email". :vartype alert_notification_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationType """ _validation = { - 'alert_notification_type': {'required': True}, - } - - _attribute_map = { - 'alert_notification_type': {'key': 'alertNotificationType', 'type': 'str'}, + "alert_notification_type": {"required": True}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(AzMonMonitoringAlertNotificationSettings, self).__init__(**kwargs) - self.alert_notification_type = 'AzureMonitor' # type: str - - -class AzureDatastore(msrest.serialization.Model): - """Base definition for Azure datastore contents configuration. - - :ivar resource_group: Azure Resource Group name. - :vartype resource_group: str - :ivar subscription_id: Azure Subscription Id. - :vartype subscription_id: str - """ - _attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, + "alert_notification_type": {"key": "alertNotificationType", "type": "str"}, } - def __init__( - self, - *, - resource_group: Optional[str] = None, - subscription_id: Optional[str] = None, - **kwargs - ): - """ - :keyword resource_group: Azure Resource Group name. - :paramtype resource_group: str - :keyword subscription_id: Azure Subscription Id. 
- :paramtype subscription_id: str - """ - super(AzureDatastore, self).__init__(**kwargs) - self.resource_group = resource_group - self.subscription_id = subscription_id + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.alert_notification_type: str = "AzureMonitor" class DatastoreProperties(ResourceBase): """Base definition for datastore contents configuration. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureBlobDatastore, AzureDataLakeGen1Datastore, AzureDataLakeGen2Datastore, AzureFileDatastore, HdfsDatastore, OneLakeDatastore. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureBlobDatastore, AzureDataLakeGen1Datastore, AzureDataLakeGen2Datastore, AzureFileDatastore, + HdfsDatastore, OneLakeDatastore Variables are only populated by the server, and will be ignored when sending a request. @@ -2975,13 +2891,12 @@ class DatastoreProperties(ResourceBase): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar credentials: Required. [Required] Account credentials. + :ivar credentials: [Required] Account credentials. Required. :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled - by server. Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", - "AzureFile", "Hdfs", "OneLake". + :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", "Hdfs", and "OneLake". :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType :ivar intellectual_property: Intellectual Property details. 
:vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty @@ -2991,83 +2906,117 @@ class DatastoreProperties(ResourceBase): """ _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, } _subtype_map = { - 'datastore_type': {'AzureBlob': 'AzureBlobDatastore', 'AzureDataLakeGen1': 'AzureDataLakeGen1Datastore', 'AzureDataLakeGen2': 'AzureDataLakeGen2Datastore', 'AzureFile': 'AzureFileDatastore', 'Hdfs': 'HdfsDatastore', 'OneLake': 'OneLakeDatastore'} + "datastore_type": { + "AzureBlob": "AzureBlobDatastore", + "AzureDataLakeGen1": "AzureDataLakeGen1Datastore", + "AzureDataLakeGen2": "AzureDataLakeGen2Datastore", + "AzureFile": "AzureFileDatastore", + "Hdfs": "HdfsDatastore", + "OneLake": "OneLakeDatastore", + } } def __init__( self, *, - credentials: "DatastoreCredentials", + credentials: "_models.DatastoreCredentials", description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - intellectual_property: Optional["IntellectualProperty"] = None, - **kwargs - ): + intellectual_property: Optional["_models.IntellectualProperty"] = None, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword credentials: Required. [Required] Account credentials. + :keyword credentials: [Required] Account credentials. Required. :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials :keyword intellectual_property: Intellectual Property details. 
:paramtype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty """ - super(DatastoreProperties, self).__init__(description=description, properties=properties, tags=tags, **kwargs) + super().__init__(description=description, properties=properties, tags=tags, **kwargs) self.credentials = credentials - self.datastore_type = 'DatastoreProperties' # type: str + self.datastore_type: Optional[str] = None self.intellectual_property = intellectual_property self.is_default = None -class AzureBlobDatastore(DatastoreProperties, AzureDatastore): +class AzureDatastore(_serialization.Model): + """Base definition for Azure datastore contents configuration. + + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + """ + + _attribute_map = { + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + } + + def __init__( + self, *, resource_group: Optional[str] = None, subscription_id: Optional[str] = None, **kwargs: Any + ) -> None: + """ + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str + """ + super().__init__(**kwargs) + self.resource_group = resource_group + self.subscription_id = subscription_id + + +class AzureBlobDatastore(AzureDatastore, DatastoreProperties): # pylint: disable=too-many-instance-attributes """Azure Blob datastore configuration. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar resource_group: Azure Resource Group name. - :vartype resource_group: str - :ivar subscription_id: Azure Subscription Id. - :vartype subscription_id: str :ivar description: The asset description text. :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar credentials: Required. [Required] Account credentials. + :ivar credentials: [Required] Account credentials. Required. :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled - by server. Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", - "AzureFile", "Hdfs", "OneLake". + :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", "Hdfs", and "OneLake". :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType :ivar intellectual_property: Intellectual Property details. :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty :ivar is_default: Readonly property to indicate if datastore is the workspace default datastore. :vartype is_default: bool + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str :ivar account_name: Storage account name. :vartype account_name: str :ivar container_name: Storage account container name. 
@@ -3077,382 +3026,400 @@ class AzureBlobDatastore(DatastoreProperties, AzureDatastore): :ivar protocol: Protocol used to communicate with the storage account. :vartype protocol: str :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". :vartype service_data_access_auth_identity: str or ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, } _attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'container_name': {'key': 'containerName', 'type': 'str'}, - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "account_name": {"key": "accountName", "type": "str"}, + "container_name": {"key": "containerName", "type": "str"}, + "endpoint": {"key": "endpoint", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, } def __init__( self, *, - credentials: "DatastoreCredentials", - resource_group: Optional[str] = None, - subscription_id: Optional[str] = None, + credentials: "_models.DatastoreCredentials", description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - intellectual_property: Optional["IntellectualProperty"] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, account_name: Optional[str] = None, container_name: Optional[str] = None, endpoint: Optional[str] = None, protocol: Optional[str] = None, - service_data_access_auth_identity: Optional[Union[str, "ServiceDataAccessAuthIdentity"]] = None, - **kwargs - ): + service_data_access_auth_identity: Optional[Union[str, 
"_models.ServiceDataAccessAuthIdentity"]] = None, + **kwargs: Any + ) -> None: """ - :keyword resource_group: Azure Resource Group name. - :paramtype resource_group: str - :keyword subscription_id: Azure Subscription Id. - :paramtype subscription_id: str :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword credentials: Required. [Required] Account credentials. + :keyword credentials: [Required] Account credentials. Required. :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials :keyword intellectual_property: Intellectual Property details. :paramtype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword account_name: Storage account name. - :paramtype account_name: str - :keyword container_name: Storage account container name. + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str + :keyword account_name: Storage account name. + :paramtype account_name: str + :keyword container_name: Storage account container name. :paramtype container_name: str :keyword endpoint: Azure cloud endpoint for the storage account. :paramtype endpoint: str :keyword protocol: Protocol used to communicate with the storage account. :paramtype protocol: str :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". 
:paramtype service_data_access_auth_identity: str or ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ - super(AzureBlobDatastore, self).__init__(description=description, properties=properties, tags=tags, credentials=credentials, intellectual_property=intellectual_property, resource_group=resource_group, subscription_id=subscription_id, **kwargs) - self.resource_group = resource_group - self.subscription_id = subscription_id - self.datastore_type = 'AzureBlob' # type: str - self.account_name = account_name - self.container_name = container_name - self.endpoint = endpoint - self.protocol = protocol - self.service_data_access_auth_identity = service_data_access_auth_identity + super().__init__( + resource_group=resource_group, + subscription_id=subscription_id, + description=description, + properties=properties, + tags=tags, + credentials=credentials, + intellectual_property=intellectual_property, + **kwargs + ) self.description = description self.properties = properties self.tags = tags self.credentials = credentials + self.datastore_type: str = "AzureBlob" self.intellectual_property = intellectual_property self.is_default = None + self.account_name = account_name + self.container_name = container_name + self.endpoint = endpoint + self.protocol = protocol + self.service_data_access_auth_identity = service_data_access_auth_identity + self.resource_group = resource_group + self.subscription_id = subscription_id -class AzureDataLakeGen1Datastore(DatastoreProperties, AzureDatastore): +class AzureDataLakeGen1Datastore(AzureDatastore, DatastoreProperties): # pylint: disable=too-many-instance-attributes """Azure Data Lake Gen1 datastore configuration. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar resource_group: Azure Resource Group name. - :vartype resource_group: str - :ivar subscription_id: Azure Subscription Id. - :vartype subscription_id: str :ivar description: The asset description text. :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar credentials: Required. [Required] Account credentials. + :ivar credentials: [Required] Account credentials. Required. :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled - by server. Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", - "AzureFile", "Hdfs", "OneLake". + :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", "Hdfs", and "OneLake". :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType :ivar intellectual_property: Intellectual Property details. :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty :ivar is_default: Readonly property to indicate if datastore is the workspace default datastore. :vartype is_default: bool + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. 
+ :vartype subscription_id: str :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". :vartype service_data_access_auth_identity: str or ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity - :ivar store_name: Required. [Required] Azure Data Lake store name. + :ivar store_name: [Required] Azure Data Lake store name. Required. :vartype store_name: str """ _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'store_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "store_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, - 'store_name': {'key': 'storeName', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, + "store_name": {"key": "storeName", "type": "str"}, } def __init__( self, *, - credentials: "DatastoreCredentials", + credentials: "_models.DatastoreCredentials", store_name: str, - resource_group: Optional[str] = None, - subscription_id: Optional[str] = None, description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - intellectual_property: Optional["IntellectualProperty"] = None, - service_data_access_auth_identity: Optional[Union[str, "ServiceDataAccessAuthIdentity"]] = None, - **kwargs - ): + intellectual_property: Optional["_models.IntellectualProperty"] = None, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, + service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, + **kwargs: Any + ) -> None: """ - :keyword resource_group: Azure Resource Group name. - :paramtype resource_group: str - :keyword subscription_id: Azure Subscription Id. 
- :paramtype subscription_id: str :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword credentials: Required. [Required] Account credentials. + :keyword credentials: [Required] Account credentials. Required. :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials :keyword intellectual_property: Intellectual Property details. :paramtype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". :paramtype service_data_access_auth_identity: str or ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity - :keyword store_name: Required. [Required] Azure Data Lake store name. + :keyword store_name: [Required] Azure Data Lake store name. Required. :paramtype store_name: str """ - super(AzureDataLakeGen1Datastore, self).__init__(description=description, properties=properties, tags=tags, credentials=credentials, intellectual_property=intellectual_property, resource_group=resource_group, subscription_id=subscription_id, **kwargs) - self.resource_group = resource_group - self.subscription_id = subscription_id - self.datastore_type = 'AzureDataLakeGen1' # type: str - self.service_data_access_auth_identity = service_data_access_auth_identity - self.store_name = store_name + super().__init__( + resource_group=resource_group, + subscription_id=subscription_id, + description=description, + properties=properties, + tags=tags, + credentials=credentials, + intellectual_property=intellectual_property, + **kwargs + ) self.description = description self.properties = properties self.tags = tags self.credentials = credentials + self.datastore_type: str = "AzureDataLakeGen1" self.intellectual_property = intellectual_property self.is_default = None + self.service_data_access_auth_identity = service_data_access_auth_identity + self.store_name = store_name + self.resource_group = resource_group + self.subscription_id = subscription_id -class AzureDataLakeGen2Datastore(DatastoreProperties, AzureDatastore): +class AzureDataLakeGen2Datastore(AzureDatastore, DatastoreProperties): # pylint: disable=too-many-instance-attributes """Azure Data Lake Gen2 datastore configuration. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar resource_group: Azure Resource Group name. - :vartype resource_group: str - :ivar subscription_id: Azure Subscription Id. - :vartype subscription_id: str :ivar description: The asset description text. :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. 
Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar credentials: Required. [Required] Account credentials. + :ivar credentials: [Required] Account credentials. Required. :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled - by server. Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", - "AzureFile", "Hdfs", "OneLake". + :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", "Hdfs", and "OneLake". :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType :ivar intellectual_property: Intellectual Property details. :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty :ivar is_default: Readonly property to indicate if datastore is the workspace default datastore. :vartype is_default: bool - :ivar account_name: Required. [Required] Storage account name. + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + :ivar account_name: [Required] Storage account name. Required. :vartype account_name: str :ivar endpoint: Azure cloud endpoint for the storage account. :vartype endpoint: str - :ivar filesystem: Required. [Required] The name of the Data Lake Gen2 filesystem. + :ivar filesystem: [Required] The name of the Data Lake Gen2 filesystem. Required. :vartype filesystem: str :ivar protocol: Protocol used to communicate with the storage account. :vartype protocol: str :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". 
:vartype service_data_access_auth_identity: str or ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'account_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'filesystem': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "account_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "filesystem": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'filesystem': {'key': 'filesystem', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "account_name": {"key": "accountName", "type": "str"}, + "endpoint": {"key": "endpoint", "type": "str"}, + "filesystem": {"key": "filesystem", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, } def __init__( self, *, - credentials: "DatastoreCredentials", + credentials: "_models.DatastoreCredentials", account_name: str, filesystem: str, - resource_group: Optional[str] = None, - subscription_id: Optional[str] = None, description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - intellectual_property: Optional["IntellectualProperty"] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, endpoint: Optional[str] = None, protocol: Optional[str] = None, - service_data_access_auth_identity: Optional[Union[str, "ServiceDataAccessAuthIdentity"]] = None, - **kwargs - ): + service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, + **kwargs: Any + ) -> None: """ - :keyword resource_group: Azure Resource Group name. - :paramtype resource_group: str - :keyword subscription_id: Azure Subscription Id. 
- :paramtype subscription_id: str :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword credentials: Required. [Required] Account credentials. + :keyword credentials: [Required] Account credentials. Required. :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials :keyword intellectual_property: Intellectual Property details. :paramtype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword account_name: Required. [Required] Storage account name. + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str + :keyword account_name: [Required] Storage account name. Required. :paramtype account_name: str :keyword endpoint: Azure cloud endpoint for the storage account. :paramtype endpoint: str - :keyword filesystem: Required. [Required] The name of the Data Lake Gen2 filesystem. + :keyword filesystem: [Required] The name of the Data Lake Gen2 filesystem. Required. :paramtype filesystem: str :keyword protocol: Protocol used to communicate with the storage account. :paramtype protocol: str :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". :paramtype service_data_access_auth_identity: str or ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ - super(AzureDataLakeGen2Datastore, self).__init__(description=description, properties=properties, tags=tags, credentials=credentials, intellectual_property=intellectual_property, resource_group=resource_group, subscription_id=subscription_id, **kwargs) - self.resource_group = resource_group - self.subscription_id = subscription_id - self.datastore_type = 'AzureDataLakeGen2' # type: str - self.account_name = account_name - self.endpoint = endpoint - self.filesystem = filesystem - self.protocol = protocol - self.service_data_access_auth_identity = service_data_access_auth_identity + super().__init__( + resource_group=resource_group, + subscription_id=subscription_id, + description=description, + properties=properties, + tags=tags, + credentials=credentials, + intellectual_property=intellectual_property, + **kwargs + ) self.description = description self.properties = properties self.tags = tags self.credentials = credentials + self.datastore_type: str = "AzureDataLakeGen2" self.intellectual_property = intellectual_property self.is_default = None + self.account_name = account_name + self.endpoint = endpoint + self.filesystem = filesystem + self.protocol = protocol + self.service_data_access_auth_identity = service_data_access_auth_identity + self.resource_group = resource_group + self.subscription_id = subscription_id -class Webhook(msrest.serialization.Model): +class Webhook(_serialization.Model): """Webhook base. - You probably want to use the sub-classes and not this class directly. 
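For orientation, the regenerated datastore models in the hunks above take keyword-only arguments, assign the datastore_type discriminator in __init__ instead of leaving it to the server, and re-expose the flattened AzureDatastore fields (resource_group, subscription_id). A minimal construction sketch, not part of the patch, assuming the models namespace referenced in the docstrings (azure.mgmt.machinelearningservices.models; the vendored _restclient models module is equivalent) and assuming a NoneDatastoreCredentials model is available as the no-credential option:

from azure.mgmt.machinelearningservices import models as _models  # import path assumed from the docstring references

# Assumed credential type; any concrete DatastoreCredentials subclass would work here.
creds = _models.NoneDatastoreCredentials()

gen2 = _models.AzureDataLakeGen2Datastore(
    credentials=creds,
    account_name="mystorageaccount",   # required, validated against pattern [a-zA-Z0-9_]
    filesystem="curated",              # required Data Lake Gen2 filesystem name
    endpoint="core.windows.net",
    tags={"team": "ml-platform"},
)

# The discriminator is now filled client-side by the constructor.
assert gen2.datastore_type == "AzureDataLakeGen2"
assert gen2.is_default is None         # read-only; populated by the server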
Known - sub-classes are: AzureDevOpsWebhook. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureDevOpsWebhook All required parameters must be populated in order to send to Azure. :ivar event_type: Send callback on a specified notification event. :vartype event_type: str - :ivar webhook_type: Required. [Required] Specifies the type of service to send a - callback.Constant filled by server. Possible values include: "AzureDevOps". + :ivar webhook_type: [Required] Specifies the type of service to send a callback. Required. + "AzureDevOps" :vartype webhook_type: str or ~azure.mgmt.machinelearningservices.models.WebhookType """ _validation = { - 'webhook_type': {'required': True}, + "webhook_type": {"required": True}, } _attribute_map = { - 'event_type': {'key': 'eventType', 'type': 'str'}, - 'webhook_type': {'key': 'webhookType', 'type': 'str'}, + "event_type": {"key": "eventType", "type": "str"}, + "webhook_type": {"key": "webhookType", "type": "str"}, } - _subtype_map = { - 'webhook_type': {'AzureDevOps': 'AzureDevOpsWebhook'} - } + _subtype_map = {"webhook_type": {"AzureDevOps": "AzureDevOpsWebhook"}} - def __init__( - self, - *, - event_type: Optional[str] = None, - **kwargs - ): + def __init__(self, *, event_type: Optional[str] = None, **kwargs: Any) -> None: """ :keyword event_type: Send callback on a specified notification event. :paramtype event_type: str """ - super(Webhook, self).__init__(**kwargs) + super().__init__(**kwargs) self.event_type = event_type - self.webhook_type = None # type: Optional[str] + self.webhook_type: Optional[str] = None class AzureDevOpsWebhook(Webhook): @@ -3462,201 +3429,206 @@ class AzureDevOpsWebhook(Webhook): :ivar event_type: Send callback on a specified notification event. :vartype event_type: str - :ivar webhook_type: Required. [Required] Specifies the type of service to send a - callback.Constant filled by server. Possible values include: "AzureDevOps". + :ivar webhook_type: [Required] Specifies the type of service to send a callback. Required. + "AzureDevOps" :vartype webhook_type: str or ~azure.mgmt.machinelearningservices.models.WebhookType """ _validation = { - 'webhook_type': {'required': True}, + "webhook_type": {"required": True}, } _attribute_map = { - 'event_type': {'key': 'eventType', 'type': 'str'}, - 'webhook_type': {'key': 'webhookType', 'type': 'str'}, + "event_type": {"key": "eventType", "type": "str"}, + "webhook_type": {"key": "webhookType", "type": "str"}, } - def __init__( - self, - *, - event_type: Optional[str] = None, - **kwargs - ): + def __init__(self, *, event_type: Optional[str] = None, **kwargs: Any) -> None: """ :keyword event_type: Send callback on a specified notification event. :paramtype event_type: str """ - super(AzureDevOpsWebhook, self).__init__(event_type=event_type, **kwargs) - self.webhook_type = 'AzureDevOps' # type: str + super().__init__(event_type=event_type, **kwargs) + self.webhook_type: str = "AzureDevOps" -class AzureFileDatastore(DatastoreProperties, AzureDatastore): +class AzureFileDatastore(AzureDatastore, DatastoreProperties): # pylint: disable=too-many-instance-attributes """Azure File datastore configuration. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar resource_group: Azure Resource Group name. - :vartype resource_group: str - :ivar subscription_id: Azure Subscription Id. 
- :vartype subscription_id: str :ivar description: The asset description text. :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar credentials: Required. [Required] Account credentials. + :ivar credentials: [Required] Account credentials. Required. :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled - by server. Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", - "AzureFile", "Hdfs", "OneLake". + :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", "Hdfs", and "OneLake". :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType :ivar intellectual_property: Intellectual Property details. :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty :ivar is_default: Readonly property to indicate if datastore is the workspace default datastore. :vartype is_default: bool - :ivar account_name: Required. [Required] Storage account name. + :ivar resource_group: Azure Resource Group name. + :vartype resource_group: str + :ivar subscription_id: Azure Subscription Id. + :vartype subscription_id: str + :ivar account_name: [Required] Storage account name. Required. :vartype account_name: str :ivar endpoint: Azure cloud endpoint for the storage account. :vartype endpoint: str - :ivar file_share_name: Required. [Required] The name of the Azure file share that the datastore - points to. + :ivar file_share_name: [Required] The name of the Azure file share that the datastore points + to. Required. :vartype file_share_name: str :ivar protocol: Protocol used to communicate with the storage account. :vartype protocol: str :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". 
:vartype service_data_access_auth_identity: str or ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'account_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'file_share_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "account_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "file_share_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'account_name': {'key': 'accountName', 'type': 'str'}, - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'file_share_name': {'key': 'fileShareName', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "account_name": {"key": "accountName", "type": "str"}, + "endpoint": {"key": "endpoint", "type": "str"}, + "file_share_name": {"key": "fileShareName", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, } def __init__( self, *, - credentials: "DatastoreCredentials", + credentials: "_models.DatastoreCredentials", account_name: str, file_share_name: str, - resource_group: Optional[str] = None, - subscription_id: Optional[str] = None, description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - intellectual_property: Optional["IntellectualProperty"] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + resource_group: Optional[str] = None, + subscription_id: Optional[str] = None, endpoint: Optional[str] = None, protocol: Optional[str] = None, - service_data_access_auth_identity: Optional[Union[str, "ServiceDataAccessAuthIdentity"]] = None, - **kwargs - ): + service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, + **kwargs: Any + ) -> None: """ - :keyword resource_group: Azure Resource Group name. - :paramtype resource_group: str - :keyword subscription_id: Azure Subscription Id. 
- :paramtype subscription_id: str :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword credentials: Required. [Required] Account credentials. + :keyword credentials: [Required] Account credentials. Required. :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials :keyword intellectual_property: Intellectual Property details. :paramtype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword account_name: Required. [Required] Storage account name. + :keyword resource_group: Azure Resource Group name. + :paramtype resource_group: str + :keyword subscription_id: Azure Subscription Id. + :paramtype subscription_id: str + :keyword account_name: [Required] Storage account name. Required. :paramtype account_name: str :keyword endpoint: Azure cloud endpoint for the storage account. :paramtype endpoint: str - :keyword file_share_name: Required. [Required] The name of the Azure file share that the - datastore points to. + :keyword file_share_name: [Required] The name of the Azure file share that the datastore points + to. Required. :paramtype file_share_name: str :keyword protocol: Protocol used to communicate with the storage account. :paramtype protocol: str :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". :paramtype service_data_access_auth_identity: str or ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ - super(AzureFileDatastore, self).__init__(description=description, properties=properties, tags=tags, credentials=credentials, intellectual_property=intellectual_property, resource_group=resource_group, subscription_id=subscription_id, **kwargs) - self.resource_group = resource_group - self.subscription_id = subscription_id - self.datastore_type = 'AzureFile' # type: str - self.account_name = account_name - self.endpoint = endpoint - self.file_share_name = file_share_name - self.protocol = protocol - self.service_data_access_auth_identity = service_data_access_auth_identity + super().__init__( + resource_group=resource_group, + subscription_id=subscription_id, + description=description, + properties=properties, + tags=tags, + credentials=credentials, + intellectual_property=intellectual_property, + **kwargs + ) self.description = description self.properties = properties self.tags = tags self.credentials = credentials + self.datastore_type: str = "AzureFile" self.intellectual_property = intellectual_property self.is_default = None + self.account_name = account_name + self.endpoint = endpoint + self.file_share_name = file_share_name + self.protocol = protocol + self.service_data_access_auth_identity = service_data_access_auth_identity + self.resource_group = resource_group + self.subscription_id = subscription_id -class InferencingServer(msrest.serialization.Model): +class InferencingServer(_serialization.Model): """InferencingServer. 
- You probably want to use the sub-classes and not this class directly. Known - sub-classes are: AzureMLBatchInferencingServer, AzureMLOnlineInferencingServer, CustomInferencingServer, TritonInferencingServer. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + AzureMLBatchInferencingServer, AzureMLOnlineInferencingServer, CustomInferencingServer, + TritonInferencingServer All required parameters must be populated in order to send to Azure. - :ivar server_type: Required. [Required] Inferencing server type for various targets.Constant - filled by server. Possible values include: "AzureMLOnline", "AzureMLBatch", "Triton", "Custom". + :ivar server_type: [Required] Inferencing server type for various targets. Required. Known + values are: "AzureMLOnline", "AzureMLBatch", "Triton", and "Custom". :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType """ _validation = { - 'server_type': {'required': True}, + "server_type": {"required": True}, } _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, + "server_type": {"key": "serverType", "type": "str"}, } _subtype_map = { - 'server_type': {'AzureMLBatch': 'AzureMLBatchInferencingServer', 'AzureMLOnline': 'AzureMLOnlineInferencingServer', 'Custom': 'CustomInferencingServer', 'Triton': 'TritonInferencingServer'} + "server_type": { + "AzureMLBatch": "AzureMLBatchInferencingServer", + "AzureMLOnline": "AzureMLOnlineInferencingServer", + "Custom": "CustomInferencingServer", + "Triton": "TritonInferencingServer", + } } - def __init__( - self, - **kwargs - ): - """ - """ - super(InferencingServer, self).__init__(**kwargs) - self.server_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.server_type: Optional[str] = None class AzureMLBatchInferencingServer(InferencingServer): @@ -3664,34 +3636,29 @@ class AzureMLBatchInferencingServer(InferencingServer): All required parameters must be populated in order to send to Azure. - :ivar server_type: Required. [Required] Inferencing server type for various targets.Constant - filled by server. Possible values include: "AzureMLOnline", "AzureMLBatch", "Triton", "Custom". + :ivar server_type: [Required] Inferencing server type for various targets. Required. Known + values are: "AzureMLOnline", "AzureMLBatch", "Triton", and "Custom". :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType :ivar code_configuration: Code configuration for AML batch inferencing server. :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration """ _validation = { - 'server_type': {'required': True}, + "server_type": {"required": True}, } _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + "server_type": {"key": "serverType", "type": "str"}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, } - def __init__( - self, - *, - code_configuration: Optional["CodeConfiguration"] = None, - **kwargs - ): + def __init__(self, *, code_configuration: Optional["_models.CodeConfiguration"] = None, **kwargs: Any) -> None: """ :keyword code_configuration: Code configuration for AML batch inferencing server. 
:paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration """ - super(AzureMLBatchInferencingServer, self).__init__(**kwargs) - self.server_type = 'AzureMLBatch' # type: str + super().__init__(**kwargs) + self.server_type: str = "AzureMLBatch" self.code_configuration = code_configuration @@ -3700,42 +3667,37 @@ class AzureMLOnlineInferencingServer(InferencingServer): All required parameters must be populated in order to send to Azure. - :ivar server_type: Required. [Required] Inferencing server type for various targets.Constant - filled by server. Possible values include: "AzureMLOnline", "AzureMLBatch", "Triton", "Custom". + :ivar server_type: [Required] Inferencing server type for various targets. Required. Known + values are: "AzureMLOnline", "AzureMLBatch", "Triton", and "Custom". :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType :ivar code_configuration: Code configuration for AML inferencing server. :vartype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration """ _validation = { - 'server_type': {'required': True}, + "server_type": {"required": True}, } _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, + "server_type": {"key": "serverType", "type": "str"}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, } - def __init__( - self, - *, - code_configuration: Optional["CodeConfiguration"] = None, - **kwargs - ): + def __init__(self, *, code_configuration: Optional["_models.CodeConfiguration"] = None, **kwargs: Any) -> None: """ :keyword code_configuration: Code configuration for AML inferencing server. :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration """ - super(AzureMLOnlineInferencingServer, self).__init__(**kwargs) - self.server_type = 'AzureMLOnline' # type: str + super().__init__(**kwargs) + self.server_type: str = "AzureMLOnline" self.code_configuration = code_configuration -class EarlyTerminationPolicy(msrest.serialization.Model): +class EarlyTerminationPolicy(_serialization.Model): """Early termination policies enable canceling poor-performing runs before they complete. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BanditPolicy, MedianStoppingPolicy, TruncationSelectionPolicy. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + BanditPolicy, MedianStoppingPolicy, TruncationSelectionPolicy All required parameters must be populated in order to send to Azure. @@ -3743,47 +3705,46 @@ class EarlyTerminationPolicy(msrest.serialization.Model): :vartype delay_evaluation: int :ivar evaluation_interval: Interval (number of runs) between policy evaluations. :vartype evaluation_interval: int - :ivar policy_type: Required. [Required] Name of policy configuration.Constant filled by server. - Possible values include: "Bandit", "MedianStopping", "TruncationSelection". + :ivar policy_type: [Required] Name of policy configuration. Required. Known values are: + "Bandit", "MedianStopping", and "TruncationSelection". 
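The inferencing-server hierarchy follows the same pattern: each leaf assigns server_type itself and code_configuration stays optional. A minimal sketch, not part of the patch, under the same assumed import:

from azure.mgmt.machinelearningservices import models as _models  # import path assumed

batch_server = _models.AzureMLBatchInferencingServer()    # code_configuration may be omitted
online_server = _models.AzureMLOnlineInferencingServer()

assert batch_server.server_type == "AzureMLBatch"
assert online_server.server_type == "AzureMLOnline"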
:vartype policy_type: str or ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType """ _validation = { - 'policy_type': {'required': True}, + "policy_type": {"required": True}, } _attribute_map = { - 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, - 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, - 'policy_type': {'key': 'policyType', 'type': 'str'}, + "delay_evaluation": {"key": "delayEvaluation", "type": "int"}, + "evaluation_interval": {"key": "evaluationInterval", "type": "int"}, + "policy_type": {"key": "policyType", "type": "str"}, } _subtype_map = { - 'policy_type': {'Bandit': 'BanditPolicy', 'MedianStopping': 'MedianStoppingPolicy', 'TruncationSelection': 'TruncationSelectionPolicy'} + "policy_type": { + "Bandit": "BanditPolicy", + "MedianStopping": "MedianStoppingPolicy", + "TruncationSelection": "TruncationSelectionPolicy", + } } - def __init__( - self, - *, - delay_evaluation: Optional[int] = 0, - evaluation_interval: Optional[int] = 0, - **kwargs - ): + def __init__(self, *, delay_evaluation: int = 0, evaluation_interval: int = 0, **kwargs: Any) -> None: """ :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. :paramtype delay_evaluation: int :keyword evaluation_interval: Interval (number of runs) between policy evaluations. :paramtype evaluation_interval: int """ - super(EarlyTerminationPolicy, self).__init__(**kwargs) + super().__init__(**kwargs) self.delay_evaluation = delay_evaluation self.evaluation_interval = evaluation_interval - self.policy_type = None # type: Optional[str] + self.policy_type: Optional[str] = None class BanditPolicy(EarlyTerminationPolicy): - """Defines an early termination policy based on slack criteria, and a frequency and delay interval for evaluation. + """Defines an early termination policy based on slack criteria, and a frequency and delay interval + for evaluation. All required parameters must be populated in order to send to Azure. @@ -3791,8 +3752,8 @@ class BanditPolicy(EarlyTerminationPolicy): :vartype delay_evaluation: int :ivar evaluation_interval: Interval (number of runs) between policy evaluations. :vartype evaluation_interval: int - :ivar policy_type: Required. [Required] Name of policy configuration.Constant filled by server. - Possible values include: "Bandit", "MedianStopping", "TruncationSelection". + :ivar policy_type: [Required] Name of policy configuration. Required. Known values are: + "Bandit", "MedianStopping", and "TruncationSelection". :vartype policy_type: str or ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType :ivar slack_amount: Absolute distance allowed from the best performing run. 
@@ -3802,26 +3763,26 @@ class BanditPolicy(EarlyTerminationPolicy): """ _validation = { - 'policy_type': {'required': True}, + "policy_type": {"required": True}, } _attribute_map = { - 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, - 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, - 'policy_type': {'key': 'policyType', 'type': 'str'}, - 'slack_amount': {'key': 'slackAmount', 'type': 'float'}, - 'slack_factor': {'key': 'slackFactor', 'type': 'float'}, + "delay_evaluation": {"key": "delayEvaluation", "type": "int"}, + "evaluation_interval": {"key": "evaluationInterval", "type": "int"}, + "policy_type": {"key": "policyType", "type": "str"}, + "slack_amount": {"key": "slackAmount", "type": "float"}, + "slack_factor": {"key": "slackFactor", "type": "float"}, } def __init__( self, *, - delay_evaluation: Optional[int] = 0, - evaluation_interval: Optional[int] = 0, - slack_amount: Optional[float] = 0, - slack_factor: Optional[float] = 0, - **kwargs - ): + delay_evaluation: int = 0, + evaluation_interval: int = 0, + slack_amount: float = 0, + slack_factor: float = 0, + **kwargs: Any + ) -> None: """ :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. :paramtype delay_evaluation: int @@ -3832,46 +3793,40 @@ def __init__( :keyword slack_factor: Ratio of the allowed distance from the best performing run. :paramtype slack_factor: float """ - super(BanditPolicy, self).__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) - self.policy_type = 'Bandit' # type: str + super().__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) + self.policy_type: str = "Bandit" self.slack_amount = slack_amount self.slack_factor = slack_factor -class BaseEnvironmentSource(msrest.serialization.Model): +class BaseEnvironmentSource(_serialization.Model): """BaseEnvironmentSource. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BaseEnvironmentId. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + BaseEnvironmentId All required parameters must be populated in order to send to Azure. - :ivar base_environment_source_type: Required. [Required] Base environment type.Constant filled - by server. Possible values include: "EnvironmentAsset". + :ivar base_environment_source_type: [Required] Base environment type. Required. 
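The early-termination policies show the other visible signature change: defaults are now plain int/float rather than Optional, and the Bandit discriminator is set in __init__. A minimal sketch, not part of the patch; the threshold values are illustrative and the import path is assumed as above:

from azure.mgmt.machinelearningservices import models as _models  # import path assumed

policy = _models.BanditPolicy(
    slack_factor=0.1,        # allow runs within 10% of the best metric
    evaluation_interval=2,   # evaluate the policy every 2 runs
    delay_evaluation=5,      # skip the first 5 evaluations
)

assert policy.policy_type == "Bandit"
assert policy.slack_amount == 0          # untouched default from the new signature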
+ "EnvironmentAsset" :vartype base_environment_source_type: str or ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSourceType """ _validation = { - 'base_environment_source_type': {'required': True}, + "base_environment_source_type": {"required": True}, } _attribute_map = { - 'base_environment_source_type': {'key': 'baseEnvironmentSourceType', 'type': 'str'}, + "base_environment_source_type": {"key": "baseEnvironmentSourceType", "type": "str"}, } - _subtype_map = { - 'base_environment_source_type': {'EnvironmentAsset': 'BaseEnvironmentId'} - } + _subtype_map = {"base_environment_source_type": {"EnvironmentAsset": "BaseEnvironmentId"}} - def __init__( - self, - **kwargs - ): - """ - """ - super(BaseEnvironmentSource, self).__init__(**kwargs) - self.base_environment_source_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.base_environment_source_type: Optional[str] = None class BaseEnvironmentId(BaseEnvironmentSource): @@ -3879,40 +3834,35 @@ class BaseEnvironmentId(BaseEnvironmentSource): All required parameters must be populated in order to send to Azure. - :ivar base_environment_source_type: Required. [Required] Base environment type.Constant filled - by server. Possible values include: "EnvironmentAsset". + :ivar base_environment_source_type: [Required] Base environment type. Required. + "EnvironmentAsset" :vartype base_environment_source_type: str or ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSourceType - :ivar resource_id: Required. [Required] Resource id accepting ArmId or AzureMlId. + :ivar resource_id: [Required] Resource id accepting ArmId or AzureMlId. Required. :vartype resource_id: str """ _validation = { - 'base_environment_source_type': {'required': True}, - 'resource_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "base_environment_source_type": {"required": True}, + "resource_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'base_environment_source_type': {'key': 'baseEnvironmentSourceType', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, + "base_environment_source_type": {"key": "baseEnvironmentSourceType", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, } - def __init__( - self, - *, - resource_id: str, - **kwargs - ): + def __init__(self, *, resource_id: str, **kwargs: Any) -> None: """ - :keyword resource_id: Required. [Required] Resource id accepting ArmId or AzureMlId. + :keyword resource_id: [Required] Resource id accepting ArmId or AzureMlId. Required. :paramtype resource_id: str """ - super(BaseEnvironmentId, self).__init__(**kwargs) - self.base_environment_source_type = 'EnvironmentAsset' # type: str + super().__init__(**kwargs) + self.base_environment_source_type: str = "EnvironmentAsset" self.resource_id = resource_id -class Resource(msrest.serialization.Model): +class Resource(_serialization.Model): """Common fields that are returned in the response for all Azure Resource Manager resources. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -3931,26 +3881,22 @@ class Resource(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(Resource, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.id = None self.name = None self.type = None @@ -3958,7 +3904,8 @@ def __init__( class TrackedResource(Resource): - """The resource model definition for an Azure Resource Manager tracked top level resource which has 'tags' and a 'location'. + """The resource model definition for an Azure Resource Manager tracked top level resource which + has 'tags' and a 'location'. Variables are only populated by the server, and will be ignored when sending a request. @@ -3975,43 +3922,37 @@ class TrackedResource(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. + :ivar location: The geo-location where the resource lives. Required. :vartype location: str """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, } - def __init__( - self, - *, - location: str, - tags: Optional[Dict[str, str]] = None, - **kwargs - ): + def __init__(self, *, location: str, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: """ - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. + :keyword location: The geo-location where the resource lives. Required. 
:paramtype location: str """ - super(TrackedResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.tags = tags self.location = location @@ -4034,113 +3975,109 @@ class BatchDeployment(TrackedResource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. + :ivar location: The geo-location where the resource lives. Required. :vartype location: str :ivar identity: Managed service identity (system assigned and/or user assigned identities). :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type. :vartype kind: str - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.BatchDeploymentProperties :ivar sku: Sku details required for ARM contract for Autoscaling. :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'BatchDeploymentProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "BatchDeploymentProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } def __init__( self, *, location: str, - properties: "BatchDeploymentProperties", + properties: "_models.BatchDeploymentProperties", tags: Optional[Dict[str, str]] = None, - identity: Optional["ManagedServiceIdentity"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, kind: Optional[str] = None, - sku: Optional["Sku"] = None, - **kwargs - ): + sku: Optional["_models.Sku"] = None, + **kwargs: Any + ) -> None: """ - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. + :keyword location: The geo-location where the resource lives. Required. 
:paramtype location: str :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type. :paramtype kind: str - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.BatchDeploymentProperties :keyword sku: Sku details required for ARM contract for Autoscaling. :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ - super(BatchDeployment, self).__init__(tags=tags, location=location, **kwargs) + super().__init__(tags=tags, location=location, **kwargs) self.identity = identity self.kind = kind self.properties = properties self.sku = sku -class BatchDeploymentConfiguration(msrest.serialization.Model): +class BatchDeploymentConfiguration(_serialization.Model): """Properties relevant to different deployment types. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BatchPipelineComponentDeploymentConfiguration. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + BatchPipelineComponentDeploymentConfiguration All required parameters must be populated in order to send to Azure. - :ivar deployment_configuration_type: Required. [Required] The type of the deployment.Constant - filled by server. Possible values include: "Model", "PipelineComponent". + :ivar deployment_configuration_type: [Required] The type of the deployment. Required. Known + values are: "Model" and "PipelineComponent". :vartype deployment_configuration_type: str or ~azure.mgmt.machinelearningservices.models.BatchDeploymentConfigurationType """ _validation = { - 'deployment_configuration_type': {'required': True}, + "deployment_configuration_type": {"required": True}, } _attribute_map = { - 'deployment_configuration_type': {'key': 'deploymentConfigurationType', 'type': 'str'}, + "deployment_configuration_type": {"key": "deploymentConfigurationType", "type": "str"}, } _subtype_map = { - 'deployment_configuration_type': {'PipelineComponent': 'BatchPipelineComponentDeploymentConfiguration'} + "deployment_configuration_type": {"PipelineComponent": "BatchPipelineComponentDeploymentConfiguration"} } - def __init__( - self, - **kwargs - ): - """ - """ - super(BatchDeploymentConfiguration, self).__init__(**kwargs) - self.deployment_configuration_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.deployment_configuration_type: Optional[str] = None -class EndpointDeploymentPropertiesBase(msrest.serialization.Model): +class EndpointDeploymentPropertiesBase(_serialization.Model): """Base definition for endpoint deployment. :ivar code_configuration: Code configuration for the endpoint deployment. 
@@ -4157,23 +4094,23 @@ class EndpointDeploymentPropertiesBase(msrest.serialization.Model): """ _attribute_map = { - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, } def __init__( self, *, - code_configuration: Optional["CodeConfiguration"] = None, + code_configuration: Optional["_models.CodeConfiguration"] = None, description: Optional[str] = None, environment_id: Optional[str] = None, environment_variables: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword code_configuration: Code configuration for the endpoint deployment. :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration @@ -4187,7 +4124,7 @@ def __init__( :keyword properties: Property dictionary. Properties can be added, but not removed or altered. :paramtype properties: dict[str, str] """ - super(EndpointDeploymentPropertiesBase, self).__init__(**kwargs) + super().__init__(**kwargs) self.code_configuration = code_configuration self.description = description self.environment_id = environment_id @@ -4195,7 +4132,7 @@ def __init__( self.properties = properties -class BatchDeploymentProperties(EndpointDeploymentPropertiesBase): +class BatchDeploymentProperties(EndpointDeploymentPropertiesBase): # pylint: disable=too-many-instance-attributes """Batch inference settings per deployment. Variables are only populated by the server, and will be ignored when sending a request. @@ -4223,24 +4160,24 @@ class BatchDeploymentProperties(EndpointDeploymentPropertiesBase): For TabularDataset, this value is the count of record failures. If set to -1 (the lower bound), all failures during batch inference will be ignored. :vartype error_threshold: int - :ivar logging_level: Logging level for batch inference operation. Possible values include: - "Info", "Warning", "Debug". + :ivar logging_level: Logging level for batch inference operation. Known values are: "Info", + "Warning", and "Debug". :vartype logging_level: str or ~azure.mgmt.machinelearningservices.models.BatchLoggingLevel :ivar max_concurrency_per_instance: Indicates maximum number of parallelism per instance. :vartype max_concurrency_per_instance: int :ivar mini_batch_size: Size of the mini-batch passed to each batch invocation. For FileDataset, this is the number of files per mini-batch. For TabularDataset, this is the size of the records in bytes, per mini-batch. - :vartype mini_batch_size: long + :vartype mini_batch_size: int :ivar model: Reference to the model asset for the endpoint deployment. :vartype model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase - :ivar output_action: Indicates how the output will be organized. Possible values include: - "SummaryOnly", "AppendRow". + :ivar output_action: Indicates how the output will be organized. Known values are: + "SummaryOnly" and "AppendRow". 
:vartype output_action: str or ~azure.mgmt.machinelearningservices.models.BatchOutputAction :ivar output_file_name: Customized output file name for append_row output action. :vartype output_file_name: str - :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values - include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled". + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState :ivar resources: Indicates compute configuration for the job. @@ -4252,50 +4189,50 @@ class BatchDeploymentProperties(EndpointDeploymentPropertiesBase): """ _validation = { - 'provisioning_state': {'readonly': True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'compute': {'key': 'compute', 'type': 'str'}, - 'deployment_configuration': {'key': 'deploymentConfiguration', 'type': 'BatchDeploymentConfiguration'}, - 'error_threshold': {'key': 'errorThreshold', 'type': 'int'}, - 'logging_level': {'key': 'loggingLevel', 'type': 'str'}, - 'max_concurrency_per_instance': {'key': 'maxConcurrencyPerInstance', 'type': 'int'}, - 'mini_batch_size': {'key': 'miniBatchSize', 'type': 'long'}, - 'model': {'key': 'model', 'type': 'AssetReferenceBase'}, - 'output_action': {'key': 'outputAction', 'type': 'str'}, - 'output_file_name': {'key': 'outputFileName', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'resources': {'key': 'resources', 'type': 'DeploymentResourceConfiguration'}, - 'retry_settings': {'key': 'retrySettings', 'type': 'BatchRetrySettings'}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "compute": {"key": "compute", "type": "str"}, + "deployment_configuration": {"key": "deploymentConfiguration", "type": "BatchDeploymentConfiguration"}, + "error_threshold": {"key": "errorThreshold", "type": "int"}, + "logging_level": {"key": "loggingLevel", "type": "str"}, + "max_concurrency_per_instance": {"key": "maxConcurrencyPerInstance", "type": "int"}, + "mini_batch_size": {"key": "miniBatchSize", "type": "int"}, + "model": {"key": "model", "type": "AssetReferenceBase"}, + "output_action": {"key": "outputAction", "type": "str"}, + "output_file_name": {"key": "outputFileName", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "resources": {"key": "resources", "type": "DeploymentResourceConfiguration"}, + "retry_settings": {"key": "retrySettings", "type": "BatchRetrySettings"}, } def __init__( self, *, - code_configuration: Optional["CodeConfiguration"] = None, + code_configuration: Optional["_models.CodeConfiguration"] = None, description: Optional[str] = None, environment_id: Optional[str] = None, environment_variables: 
Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, compute: Optional[str] = None, - deployment_configuration: Optional["BatchDeploymentConfiguration"] = None, - error_threshold: Optional[int] = -1, - logging_level: Optional[Union[str, "BatchLoggingLevel"]] = None, - max_concurrency_per_instance: Optional[int] = 1, - mini_batch_size: Optional[int] = 10, - model: Optional["AssetReferenceBase"] = None, - output_action: Optional[Union[str, "BatchOutputAction"]] = None, - output_file_name: Optional[str] = "predictions.csv", - resources: Optional["DeploymentResourceConfiguration"] = None, - retry_settings: Optional["BatchRetrySettings"] = None, - **kwargs - ): + deployment_configuration: Optional["_models.BatchDeploymentConfiguration"] = None, + error_threshold: int = -1, + logging_level: Optional[Union[str, "_models.BatchLoggingLevel"]] = None, + max_concurrency_per_instance: int = 1, + mini_batch_size: int = 10, + model: Optional["_models.AssetReferenceBase"] = None, + output_action: Optional[Union[str, "_models.BatchOutputAction"]] = None, + output_file_name: str = "predictions.csv", + resources: Optional["_models.DeploymentResourceConfiguration"] = None, + retry_settings: Optional["_models.BatchRetrySettings"] = None, + **kwargs: Any + ) -> None: """ :keyword code_configuration: Code configuration for the endpoint deployment. :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration @@ -4320,19 +4257,19 @@ def __init__( For TabularDataset, this value is the count of record failures. If set to -1 (the lower bound), all failures during batch inference will be ignored. :paramtype error_threshold: int - :keyword logging_level: Logging level for batch inference operation. Possible values include: - "Info", "Warning", "Debug". + :keyword logging_level: Logging level for batch inference operation. Known values are: "Info", + "Warning", and "Debug". :paramtype logging_level: str or ~azure.mgmt.machinelearningservices.models.BatchLoggingLevel :keyword max_concurrency_per_instance: Indicates maximum number of parallelism per instance. :paramtype max_concurrency_per_instance: int :keyword mini_batch_size: Size of the mini-batch passed to each batch invocation. For FileDataset, this is the number of files per mini-batch. For TabularDataset, this is the size of the records in bytes, per mini-batch. - :paramtype mini_batch_size: long + :paramtype mini_batch_size: int :keyword model: Reference to the model asset for the endpoint deployment. :paramtype model: ~azure.mgmt.machinelearningservices.models.AssetReferenceBase - :keyword output_action: Indicates how the output will be organized. Possible values include: - "SummaryOnly", "AppendRow". + :keyword output_action: Indicates how the output will be organized. Known values are: + "SummaryOnly" and "AppendRow". :paramtype output_action: str or ~azure.mgmt.machinelearningservices.models.BatchOutputAction :keyword output_file_name: Customized output file name for append_row output action. :paramtype output_file_name: str @@ -4344,7 +4281,14 @@ def __init__( If not provided, will default to the defaults defined in BatchRetrySettings. 
:paramtype retry_settings: ~azure.mgmt.machinelearningservices.models.BatchRetrySettings """ - super(BatchDeploymentProperties, self).__init__(code_configuration=code_configuration, description=description, environment_id=environment_id, environment_variables=environment_variables, properties=properties, **kwargs) + super().__init__( + code_configuration=code_configuration, + description=description, + environment_id=environment_id, + environment_variables=environment_variables, + properties=properties, + **kwargs + ) self.compute = compute self.deployment_configuration = deployment_configuration self.error_threshold = error_threshold @@ -4359,7 +4303,7 @@ def __init__( self.retry_settings = retry_settings -class BatchDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Model): +class BatchDeploymentTrackedResourceArmPaginatedResult(_serialization.Model): """A paginated list of BatchDeployment entities. :ivar next_link: The link to the next page of BatchDeployment objects. If null, there are no @@ -4370,17 +4314,13 @@ class BatchDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Mode """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[BatchDeployment]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[BatchDeployment]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["BatchDeployment"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.BatchDeployment"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of BatchDeployment objects. If null, there are no additional pages. @@ -4388,7 +4328,7 @@ def __init__( :keyword value: An array of objects of type BatchDeployment. :paramtype value: list[~azure.mgmt.machinelearningservices.models.BatchDeployment] """ - super(BatchDeploymentTrackedResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value @@ -4411,77 +4351,77 @@ class BatchEndpoint(TrackedResource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. + :ivar location: The geo-location where the resource lives. Required. :vartype location: str :ivar identity: Managed service identity (system assigned and/or user assigned identities). :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type. :vartype kind: str - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.BatchEndpointProperties :ivar sku: Sku details required for ARM contract for Autoscaling. 
:vartype sku: ~azure.mgmt.machinelearningservices.models.Sku """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'BatchEndpointProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "BatchEndpointProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } def __init__( self, *, location: str, - properties: "BatchEndpointProperties", + properties: "_models.BatchEndpointProperties", tags: Optional[Dict[str, str]] = None, - identity: Optional["ManagedServiceIdentity"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, kind: Optional[str] = None, - sku: Optional["Sku"] = None, - **kwargs - ): + sku: Optional["_models.Sku"] = None, + **kwargs: Any + ) -> None: """ - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. + :keyword location: The geo-location where the resource lives. Required. :paramtype location: str :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type. :paramtype kind: str - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.BatchEndpointProperties :keyword sku: Sku details required for ARM contract for Autoscaling. :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ - super(BatchEndpoint, self).__init__(tags=tags, location=location, **kwargs) + super().__init__(tags=tags, location=location, **kwargs) self.identity = identity self.kind = kind self.properties = properties self.sku = sku -class BatchEndpointDefaults(msrest.serialization.Model): +class BatchEndpointDefaults(_serialization.Model): """Batch endpoint default values. :ivar deployment_name: Name of the deployment that will be default for the endpoint. 
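As the `BatchDeploymentProperties` hunk above shows, the regenerated constructors are keyword-only, use `_models.`-prefixed string forward references, and carry plain (non-Optional) defaults such as `error_threshold=-1`, `max_concurrency_per_instance=1`, `mini_batch_size=10`, and `output_file_name="predictions.csv"`. A hedged usage sketch against this private module; resource names below are placeholders, and `serialize()` is the method inherited from the vendored serialization base:

    import datetime

    from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

    props = _models.BatchDeploymentProperties(
        compute="cpu-cluster",                      # placeholder compute name
        output_action="AppendRow",                  # str or BatchOutputAction accepted
        retry_settings=_models.BatchRetrySettings(
            max_retries=3,                          # generated default
            timeout=datetime.timedelta(seconds=30), # spec default is the ISO 8601 string "PT30S"
        ),
    )
    deployment = _models.BatchDeployment(location="eastus", properties=props)
    payload = deployment.serialize()                # JSON-ready dict with REST (camelCase) keys
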
@@ -4490,34 +4430,29 @@ class BatchEndpointDefaults(msrest.serialization.Model): """ _attribute_map = { - 'deployment_name': {'key': 'deploymentName', 'type': 'str'}, + "deployment_name": {"key": "deploymentName", "type": "str"}, } - def __init__( - self, - *, - deployment_name: Optional[str] = None, - **kwargs - ): + def __init__(self, *, deployment_name: Optional[str] = None, **kwargs: Any) -> None: """ :keyword deployment_name: Name of the deployment that will be default for the endpoint. This deployment will end up getting 100% traffic when the endpoint scoring URL is invoked. :paramtype deployment_name: str """ - super(BatchEndpointDefaults, self).__init__(**kwargs) + super().__init__(**kwargs) self.deployment_name = deployment_name -class EndpointPropertiesBase(msrest.serialization.Model): +class EndpointPropertiesBase(_serialization.Model): """Inference Endpoint base definition. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' for - Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. - Possible values include: "AMLToken", "Key", "AADToken". + :ivar auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure + Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Required. Known values are: "AMLToken", "Key", and "AADToken". :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode :ivar description: Description of the inference endpoint. :vartype description: str @@ -4534,33 +4469,33 @@ class EndpointPropertiesBase(msrest.serialization.Model): """ _validation = { - 'auth_mode': {'required': True}, - 'scoring_uri': {'readonly': True}, - 'swagger_uri': {'readonly': True}, + "auth_mode": {"required": True}, + "scoring_uri": {"readonly": True}, + "swagger_uri": {"readonly": True}, } _attribute_map = { - 'auth_mode': {'key': 'authMode', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, - 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, + "auth_mode": {"key": "authMode", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "keys": {"key": "keys", "type": "EndpointAuthKeys"}, + "properties": {"key": "properties", "type": "{str}"}, + "scoring_uri": {"key": "scoringUri", "type": "str"}, + "swagger_uri": {"key": "swaggerUri", "type": "str"}, } def __init__( self, *, - auth_mode: Union[str, "EndpointAuthMode"], + auth_mode: Union[str, "_models.EndpointAuthMode"], description: Optional[str] = None, - keys: Optional["EndpointAuthKeys"] = None, + keys: Optional["_models.EndpointAuthKeys"] = None, properties: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' - for Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' - does. Possible values include: "AMLToken", "Key", "AADToken". + :keyword auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure + Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Required. 
Known values are: "AMLToken", "Key", and "AADToken". :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode :keyword description: Description of the inference endpoint. :paramtype description: str @@ -4571,7 +4506,7 @@ def __init__( :keyword properties: Property dictionary. Properties can be added, but not removed or altered. :paramtype properties: dict[str, str] """ - super(EndpointPropertiesBase, self).__init__(**kwargs) + super().__init__(**kwargs) self.auth_mode = auth_mode self.description = description self.keys = keys @@ -4587,9 +4522,9 @@ class BatchEndpointProperties(EndpointPropertiesBase): All required parameters must be populated in order to send to Azure. - :ivar auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' for - Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. - Possible values include: "AMLToken", "Key", "AADToken". + :ivar auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure + Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Required. Known values are: "AMLToken", "Key", and "AADToken". :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode :ivar description: Description of the inference endpoint. :vartype description: str @@ -4605,44 +4540,44 @@ class BatchEndpointProperties(EndpointPropertiesBase): :vartype swagger_uri: str :ivar defaults: Default values for Batch Endpoint. :vartype defaults: ~azure.mgmt.machinelearningservices.models.BatchEndpointDefaults - :ivar provisioning_state: Provisioning state for the endpoint. Possible values include: - "Creating", "Deleting", "Succeeded", "Failed", "Updating", "Canceled". + :ivar provisioning_state: Provisioning state for the endpoint. Known values are: "Creating", + "Deleting", "Succeeded", "Failed", "Updating", and "Canceled". 
:vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState """ _validation = { - 'auth_mode': {'required': True}, - 'scoring_uri': {'readonly': True}, - 'swagger_uri': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "auth_mode": {"required": True}, + "scoring_uri": {"readonly": True}, + "swagger_uri": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'auth_mode': {'key': 'authMode', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, - 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, - 'defaults': {'key': 'defaults', 'type': 'BatchEndpointDefaults'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "auth_mode": {"key": "authMode", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "keys": {"key": "keys", "type": "EndpointAuthKeys"}, + "properties": {"key": "properties", "type": "{str}"}, + "scoring_uri": {"key": "scoringUri", "type": "str"}, + "swagger_uri": {"key": "swaggerUri", "type": "str"}, + "defaults": {"key": "defaults", "type": "BatchEndpointDefaults"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } def __init__( self, *, - auth_mode: Union[str, "EndpointAuthMode"], + auth_mode: Union[str, "_models.EndpointAuthMode"], description: Optional[str] = None, - keys: Optional["EndpointAuthKeys"] = None, + keys: Optional["_models.EndpointAuthKeys"] = None, properties: Optional[Dict[str, str]] = None, - defaults: Optional["BatchEndpointDefaults"] = None, - **kwargs - ): + defaults: Optional["_models.BatchEndpointDefaults"] = None, + **kwargs: Any + ) -> None: """ - :keyword auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' - for Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' - does. Possible values include: "AMLToken", "Key", "AADToken". + :keyword auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure + Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Required. Known values are: "AMLToken", "Key", and "AADToken". :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode :keyword description: Description of the inference endpoint. :paramtype description: str @@ -4655,12 +4590,12 @@ def __init__( :keyword defaults: Default values for Batch Endpoint. :paramtype defaults: ~azure.mgmt.machinelearningservices.models.BatchEndpointDefaults """ - super(BatchEndpointProperties, self).__init__(auth_mode=auth_mode, description=description, keys=keys, properties=properties, **kwargs) + super().__init__(auth_mode=auth_mode, description=description, keys=keys, properties=properties, **kwargs) self.defaults = defaults self.provisioning_state = None -class BatchEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model): +class BatchEndpointTrackedResourceArmPaginatedResult(_serialization.Model): """A paginated list of BatchEndpoint entities. :ivar next_link: The link to the next page of BatchEndpoint objects. 
If null, there are no @@ -4671,17 +4606,13 @@ class BatchEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model) """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[BatchEndpoint]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[BatchEndpoint]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["BatchEndpoint"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.BatchEndpoint"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of BatchEndpoint objects. If null, there are no additional pages. @@ -4689,7 +4620,7 @@ def __init__( :keyword value: An array of objects of type BatchEndpoint. :paramtype value: list[~azure.mgmt.machinelearningservices.models.BatchEndpoint] """ - super(BatchEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value @@ -4699,8 +4630,8 @@ class BatchPipelineComponentDeploymentConfiguration(BatchDeploymentConfiguration All required parameters must be populated in order to send to Azure. - :ivar deployment_configuration_type: Required. [Required] The type of the deployment.Constant - filled by server. Possible values include: "Model", "PipelineComponent". + :ivar deployment_configuration_type: [Required] The type of the deployment. Required. Known + values are: "Model" and "PipelineComponent". :vartype deployment_configuration_type: str or ~azure.mgmt.machinelearningservices.models.BatchDeploymentConfigurationType :ivar component_id: The ARM id of the component to be run. @@ -4709,31 +4640,31 @@ class BatchPipelineComponentDeploymentConfiguration(BatchDeploymentConfiguration :vartype description: str :ivar settings: Run-time settings for the pipeline job. :vartype settings: dict[str, str] - :ivar tags: A set of tags. The tags which will be applied to the job. + :ivar tags: The tags which will be applied to the job. :vartype tags: dict[str, str] """ _validation = { - 'deployment_configuration_type': {'required': True}, + "deployment_configuration_type": {"required": True}, } _attribute_map = { - 'deployment_configuration_type': {'key': 'deploymentConfigurationType', 'type': 'str'}, - 'component_id': {'key': 'componentId', 'type': 'IdAssetReference'}, - 'description': {'key': 'description', 'type': 'str'}, - 'settings': {'key': 'settings', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + "deployment_configuration_type": {"key": "deploymentConfigurationType", "type": "str"}, + "component_id": {"key": "componentId", "type": "IdAssetReference"}, + "description": {"key": "description", "type": "str"}, + "settings": {"key": "settings", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, } def __init__( self, *, - component_id: Optional["IdAssetReference"] = None, + component_id: Optional["_models.IdAssetReference"] = None, description: Optional[str] = None, settings: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword component_id: The ARM id of the component to be run. :paramtype component_id: ~azure.mgmt.machinelearningservices.models.IdAssetReference @@ -4741,18 +4672,18 @@ def __init__( :paramtype description: str :keyword settings: Run-time settings for the pipeline job. :paramtype settings: dict[str, str] - :keyword tags: A set of tags. 
The tags which will be applied to the job. + :keyword tags: The tags which will be applied to the job. :paramtype tags: dict[str, str] """ - super(BatchPipelineComponentDeploymentConfiguration, self).__init__(**kwargs) - self.deployment_configuration_type = 'PipelineComponent' # type: str + super().__init__(**kwargs) + self.deployment_configuration_type: str = "PipelineComponent" self.component_id = component_id self.description = description self.settings = settings self.tags = tags -class BatchRetrySettings(msrest.serialization.Model): +class BatchRetrySettings(_serialization.Model): """Retry settings for a batch inference operation. :ivar max_retries: Maximum retry count for a mini-batch. @@ -4762,64 +4693,58 @@ class BatchRetrySettings(msrest.serialization.Model): """ _attribute_map = { - 'max_retries': {'key': 'maxRetries', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, + "max_retries": {"key": "maxRetries", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, } - def __init__( - self, - *, - max_retries: Optional[int] = 3, - timeout: Optional[datetime.timedelta] = "PT30S", - **kwargs - ): + def __init__(self, *, max_retries: int = 3, timeout: datetime.timedelta = "PT30S", **kwargs: Any) -> None: """ :keyword max_retries: Maximum retry count for a mini-batch. :paramtype max_retries: int :keyword timeout: Invocation timeout for a mini-batch, in ISO 8601 format. :paramtype timeout: ~datetime.timedelta """ - super(BatchRetrySettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.max_retries = max_retries self.timeout = timeout -class SamplingAlgorithm(msrest.serialization.Model): +class SamplingAlgorithm(_serialization.Model): """The Sampling Algorithm used to generate hyperparameter values, along with properties to -configure the algorithm. + configure the algorithm. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: BayesianSamplingAlgorithm, GridSamplingAlgorithm, RandomSamplingAlgorithm. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + BayesianSamplingAlgorithm, GridSamplingAlgorithm, RandomSamplingAlgorithm All required parameters must be populated in order to send to Azure. - :ivar sampling_algorithm_type: Required. [Required] The algorithm used for generating - hyperparameter values, along with configuration properties.Constant filled by server. Possible - values include: "Grid", "Random", "Bayesian". + :ivar sampling_algorithm_type: [Required] The algorithm used for generating hyperparameter + values, along with configuration properties. Required. Known values are: "Grid", "Random", and + "Bayesian". 
:vartype sampling_algorithm_type: str or ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ _validation = { - 'sampling_algorithm_type': {'required': True}, + "sampling_algorithm_type": {"required": True}, } _attribute_map = { - 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, + "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"}, } _subtype_map = { - 'sampling_algorithm_type': {'Bayesian': 'BayesianSamplingAlgorithm', 'Grid': 'GridSamplingAlgorithm', 'Random': 'RandomSamplingAlgorithm'} + "sampling_algorithm_type": { + "Bayesian": "BayesianSamplingAlgorithm", + "Grid": "GridSamplingAlgorithm", + "Random": "RandomSamplingAlgorithm", + } } - def __init__( - self, - **kwargs - ): - """ - """ - super(SamplingAlgorithm, self).__init__(**kwargs) - self.sampling_algorithm_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.sampling_algorithm_type: Optional[str] = None class BayesianSamplingAlgorithm(SamplingAlgorithm): @@ -4827,32 +4752,28 @@ class BayesianSamplingAlgorithm(SamplingAlgorithm): All required parameters must be populated in order to send to Azure. - :ivar sampling_algorithm_type: Required. [Required] The algorithm used for generating - hyperparameter values, along with configuration properties.Constant filled by server. Possible - values include: "Grid", "Random", "Bayesian". + :ivar sampling_algorithm_type: [Required] The algorithm used for generating hyperparameter + values, along with configuration properties. Required. Known values are: "Grid", "Random", and + "Bayesian". :vartype sampling_algorithm_type: str or ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ _validation = { - 'sampling_algorithm_type': {'required': True}, + "sampling_algorithm_type": {"required": True}, } _attribute_map = { - 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, + "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(BayesianSamplingAlgorithm, self).__init__(**kwargs) - self.sampling_algorithm_type = 'Bayesian' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.sampling_algorithm_type: str = "Bayesian" -class BindOptions(msrest.serialization.Model): +class BindOptions(_serialization.Model): """BindOptions. :ivar propagation: Type of Bind Option. @@ -4864,9 +4785,9 @@ class BindOptions(msrest.serialization.Model): """ _attribute_map = { - 'propagation': {'key': 'propagation', 'type': 'str'}, - 'create_host_path': {'key': 'createHostPath', 'type': 'bool'}, - 'selinux': {'key': 'selinux', 'type': 'str'}, + "propagation": {"key": "propagation", "type": "str"}, + "create_host_path": {"key": "createHostPath", "type": "bool"}, + "selinux": {"key": "selinux", "type": "str"}, } def __init__( @@ -4875,8 +4796,8 @@ def __init__( propagation: Optional[str] = None, create_host_path: Optional[bool] = None, selinux: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword propagation: Type of Bind Option. :paramtype propagation: str @@ -4885,13 +4806,13 @@ def __init__( :keyword selinux: Mention the selinux options. 
:paramtype selinux: str """ - super(BindOptions, self).__init__(**kwargs) + super().__init__(**kwargs) self.propagation = propagation self.create_host_path = create_host_path self.selinux = selinux -class BlobReferenceForConsumptionDto(msrest.serialization.Model): +class BlobReferenceForConsumptionDto(_serialization.Model): """BlobReferenceForConsumptionDto. :ivar blob_uri: Blob URI path for client to upload data. @@ -4904,19 +4825,19 @@ class BlobReferenceForConsumptionDto(msrest.serialization.Model): """ _attribute_map = { - 'blob_uri': {'key': 'blobUri', 'type': 'str'}, - 'credential': {'key': 'credential', 'type': 'PendingUploadCredentialDto'}, - 'storage_account_arm_id': {'key': 'storageAccountArmId', 'type': 'str'}, + "blob_uri": {"key": "blobUri", "type": "str"}, + "credential": {"key": "credential", "type": "PendingUploadCredentialDto"}, + "storage_account_arm_id": {"key": "storageAccountArmId", "type": "str"}, } def __init__( self, *, blob_uri: Optional[str] = None, - credential: Optional["PendingUploadCredentialDto"] = None, + credential: Optional["_models.PendingUploadCredentialDto"] = None, storage_account_arm_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword blob_uri: Blob URI path for client to upload data. Example: https://blob.windows.core.net/Container/Path. @@ -4926,84 +4847,80 @@ def __init__( :keyword storage_account_arm_id: Arm ID of the storage account to use. :paramtype storage_account_arm_id: str """ - super(BlobReferenceForConsumptionDto, self).__init__(**kwargs) + super().__init__(**kwargs) self.blob_uri = blob_uri self.credential = credential self.storage_account_arm_id = storage_account_arm_id -class BuildContext(msrest.serialization.Model): +class BuildContext(_serialization.Model): """Configuration settings for Docker build context. All required parameters must be populated in order to send to Azure. - :ivar context_uri: Required. [Required] URI of the Docker build context used to build the - image. Supports blob URIs on environment creation and may return blob or Git URIs. - - + :ivar context_uri: [Required] URI of the Docker build context used to build the image. Supports + blob URIs on environment creation and may return blob or Git URIs. + + .. raw:: html - + . + Required. :vartype context_uri: str :ivar dockerfile_path: Path to the Dockerfile in the build context. - - + + .. raw:: html - + . :vartype dockerfile_path: str """ _validation = { - 'context_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "context_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'context_uri': {'key': 'contextUri', 'type': 'str'}, - 'dockerfile_path': {'key': 'dockerfilePath', 'type': 'str'}, + "context_uri": {"key": "contextUri", "type": "str"}, + "dockerfile_path": {"key": "dockerfilePath", "type": "str"}, } - def __init__( - self, - *, - context_uri: str, - dockerfile_path: Optional[str] = "Dockerfile", - **kwargs - ): + def __init__(self, *, context_uri: str, dockerfile_path: str = "Dockerfile", **kwargs: Any) -> None: """ - :keyword context_uri: Required. [Required] URI of the Docker build context used to build the - image. Supports blob URIs on environment creation and may return blob or Git URIs. - - + :keyword context_uri: [Required] URI of the Docker build context used to build the image. + Supports blob URIs on environment creation and may return blob or Git URIs. + + .. raw:: html - + . + Required. 
:paramtype context_uri: str :keyword dockerfile_path: Path to the Dockerfile in the build context. - - + + .. raw:: html - + . :paramtype dockerfile_path: str """ - super(BuildContext, self).__init__(**kwargs) + super().__init__(**kwargs) self.context_uri = context_uri self.dockerfile_path = dockerfile_path -class DataDriftMetricThresholdBase(msrest.serialization.Model): +class DataDriftMetricThresholdBase(_serialization.Model): """DataDriftMetricThresholdBase. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CategoricalDataDriftMetricThreshold, NumericalDataDriftMetricThreshold. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CategoricalDataDriftMetricThreshold, NumericalDataDriftMetricThreshold All required parameters must be populated in order to send to Azure. - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType :ivar threshold: The threshold value. If null, a default value will be set depending on the selected metric. @@ -5011,31 +4928,29 @@ class DataDriftMetricThresholdBase(msrest.serialization.Model): """ _validation = { - 'data_type': {'required': True}, + "data_type": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } _subtype_map = { - 'data_type': {'Categorical': 'CategoricalDataDriftMetricThreshold', 'Numerical': 'NumericalDataDriftMetricThreshold'} + "data_type": { + "Categorical": "CategoricalDataDriftMetricThreshold", + "Numerical": "NumericalDataDriftMetricThreshold", + } } - def __init__( - self, - *, - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + def __init__(self, *, threshold: Optional["_models.MonitoringThreshold"] = None, **kwargs: Any) -> None: """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ - super(DataDriftMetricThresholdBase, self).__init__(**kwargs) - self.data_type = None # type: Optional[str] + super().__init__(**kwargs) + self.data_type: Optional[str] = None self.threshold = threshold @@ -5044,58 +4959,58 @@ class CategoricalDataDriftMetricThreshold(DataDriftMetricThresholdBase): All required parameters must be populated in order to send to Azure. - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType :ivar threshold: The threshold value. If null, a default value will be set depending on the selected metric. :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. 
[Required] The categorical data drift metric to calculate. Possible - values include: "JensenShannonDistance", "PopulationStabilityIndex", "PearsonsChiSquaredTest". + :ivar metric: [Required] The categorical data drift metric to calculate. Required. Known values + are: "JensenShannonDistance", "PopulationStabilityIndex", and "PearsonsChiSquaredTest". :vartype metric: str or ~azure.mgmt.machinelearningservices.models.CategoricalDataDriftMetric """ _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, + "data_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } def __init__( self, *, - metric: Union[str, "CategoricalDataDriftMetric"], - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + metric: Union[str, "_models.CategoricalDataDriftMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. [Required] The categorical data drift metric to calculate. Possible - values include: "JensenShannonDistance", "PopulationStabilityIndex", "PearsonsChiSquaredTest". + :keyword metric: [Required] The categorical data drift metric to calculate. Required. Known + values are: "JensenShannonDistance", "PopulationStabilityIndex", and "PearsonsChiSquaredTest". :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.CategoricalDataDriftMetric """ - super(CategoricalDataDriftMetricThreshold, self).__init__(threshold=threshold, **kwargs) - self.data_type = 'Categorical' # type: str + super().__init__(threshold=threshold, **kwargs) + self.data_type: str = "Categorical" self.metric = metric -class DataQualityMetricThresholdBase(msrest.serialization.Model): +class DataQualityMetricThresholdBase(_serialization.Model): """DataQualityMetricThresholdBase. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CategoricalDataQualityMetricThreshold, NumericalDataQualityMetricThreshold. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CategoricalDataQualityMetricThreshold, NumericalDataQualityMetricThreshold All required parameters must be populated in order to send to Azure. - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType :ivar threshold: The threshold value. If null, a default value will be set depending on the selected metric. 
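The metric-threshold and build-context hunks above follow the same regeneration pattern: the subclass constructor now type-annotates and assigns the discriminator (for example `self.data_type: str = "Categorical"`), enum-typed parameters accept either the enum member or its string value, and simple defaults are plain values. An illustrative sketch only, using classes exactly as generated above; the blob URI is a placeholder:

    from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

    # metric is required; threshold may be omitted and defaults server-side.
    drift = _models.CategoricalDataDriftMetricThreshold(metric="JensenShannonDistance")
    assert drift.data_type == "Categorical"

    # context_uri is required (validated non-empty); dockerfile_path defaults to "Dockerfile".
    build = _models.BuildContext(context_uri="https://example.blob.core.windows.net/ctx/")
    assert build.dockerfile_path == "Dockerfile"
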
@@ -5103,31 +5018,29 @@ class DataQualityMetricThresholdBase(msrest.serialization.Model): """ _validation = { - 'data_type': {'required': True}, + "data_type": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } _subtype_map = { - 'data_type': {'Categorical': 'CategoricalDataQualityMetricThreshold', 'Numerical': 'NumericalDataQualityMetricThreshold'} + "data_type": { + "Categorical": "CategoricalDataQualityMetricThreshold", + "Numerical": "NumericalDataQualityMetricThreshold", + } } - def __init__( - self, - *, - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + def __init__(self, *, threshold: Optional["_models.MonitoringThreshold"] = None, **kwargs: Any) -> None: """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ - super(DataQualityMetricThresholdBase, self).__init__(**kwargs) - self.data_type = None # type: Optional[str] + super().__init__(**kwargs) + self.data_type: Optional[str] = None self.threshold = threshold @@ -5136,59 +5049,59 @@ class CategoricalDataQualityMetricThreshold(DataQualityMetricThresholdBase): All required parameters must be populated in order to send to Azure. - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType :ivar threshold: The threshold value. If null, a default value will be set depending on the selected metric. :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. [Required] The categorical data quality metric to calculate. Possible - values include: "NullValueRate", "DataTypeErrorRate", "OutOfBoundsRate". + :ivar metric: [Required] The categorical data quality metric to calculate. Required. Known + values are: "NullValueRate", "DataTypeErrorRate", and "OutOfBoundsRate". :vartype metric: str or ~azure.mgmt.machinelearningservices.models.CategoricalDataQualityMetric """ _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, + "data_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } def __init__( self, *, - metric: Union[str, "CategoricalDataQualityMetric"], - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + metric: Union[str, "_models.CategoricalDataQualityMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. 
:paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. [Required] The categorical data quality metric to calculate. - Possible values include: "NullValueRate", "DataTypeErrorRate", "OutOfBoundsRate". + :keyword metric: [Required] The categorical data quality metric to calculate. Required. Known + values are: "NullValueRate", "DataTypeErrorRate", and "OutOfBoundsRate". :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.CategoricalDataQualityMetric """ - super(CategoricalDataQualityMetricThreshold, self).__init__(threshold=threshold, **kwargs) - self.data_type = 'Categorical' # type: str + super().__init__(threshold=threshold, **kwargs) + self.data_type: str = "Categorical" self.metric = metric -class PredictionDriftMetricThresholdBase(msrest.serialization.Model): +class PredictionDriftMetricThresholdBase(_serialization.Model): """PredictionDriftMetricThresholdBase. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CategoricalPredictionDriftMetricThreshold, NumericalPredictionDriftMetricThreshold. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CategoricalPredictionDriftMetricThreshold, NumericalPredictionDriftMetricThreshold All required parameters must be populated in order to send to Azure. - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType :ivar threshold: The threshold value. If null, a default value will be set depending on the selected metric. @@ -5196,31 +5109,29 @@ class PredictionDriftMetricThresholdBase(msrest.serialization.Model): """ _validation = { - 'data_type': {'required': True}, + "data_type": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } _subtype_map = { - 'data_type': {'Categorical': 'CategoricalPredictionDriftMetricThreshold', 'Numerical': 'NumericalPredictionDriftMetricThreshold'} + "data_type": { + "Categorical": "CategoricalPredictionDriftMetricThreshold", + "Numerical": "NumericalPredictionDriftMetricThreshold", + } } - def __init__( - self, - *, - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + def __init__(self, *, threshold: Optional["_models.MonitoringThreshold"] = None, **kwargs: Any) -> None: """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ - super(PredictionDriftMetricThresholdBase, self).__init__(**kwargs) - self.data_type = None # type: Optional[str] + super().__init__(**kwargs) + self.data_type: Optional[str] = None self.threshold = threshold @@ -5229,49 +5140,48 @@ class CategoricalPredictionDriftMetricThreshold(PredictionDriftMetricThresholdBa All required parameters must be populated in order to send to Azure. - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. 
Possible values include: "Numerical", "Categorical". + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType :ivar threshold: The threshold value. If null, a default value will be set depending on the selected metric. :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. [Required] The categorical prediction drift metric to calculate. - Possible values include: "JensenShannonDistance", "PopulationStabilityIndex", - "PearsonsChiSquaredTest". + :ivar metric: [Required] The categorical prediction drift metric to calculate. Required. Known + values are: "JensenShannonDistance", "PopulationStabilityIndex", and "PearsonsChiSquaredTest". :vartype metric: str or ~azure.mgmt.machinelearningservices.models.CategoricalPredictionDriftMetric """ _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, + "data_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } def __init__( self, *, - metric: Union[str, "CategoricalPredictionDriftMetric"], - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + metric: Union[str, "_models.CategoricalPredictionDriftMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. [Required] The categorical prediction drift metric to calculate. - Possible values include: "JensenShannonDistance", "PopulationStabilityIndex", + :keyword metric: [Required] The categorical prediction drift metric to calculate. Required. + Known values are: "JensenShannonDistance", "PopulationStabilityIndex", and "PearsonsChiSquaredTest". :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.CategoricalPredictionDriftMetric """ - super(CategoricalPredictionDriftMetricThreshold, self).__init__(threshold=threshold, **kwargs) - self.data_type = 'Categorical' # type: str + super().__init__(threshold=threshold, **kwargs) + self.data_type: str = "Categorical" self.metric = metric @@ -5280,71 +5190,71 @@ class CertificateDatastoreCredentials(DatastoreCredentials): All required parameters must be populated in order to send to Azure. - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType :ivar authority_url: Authority URL used for authentication. 
:vartype authority_url: str - :ivar client_id: Required. [Required] Service principal client ID. + :ivar client_id: [Required] Service principal client ID. Required. :vartype client_id: str :ivar resource_url: Resource the service principal has access to. :vartype resource_url: str - :ivar secrets: Required. [Required] Service principal secrets. + :ivar secrets: [Required] Service principal secrets. Required. :vartype secrets: ~azure.mgmt.machinelearningservices.models.CertificateDatastoreSecrets - :ivar tenant_id: Required. [Required] ID of the tenant to which the service principal belongs. + :ivar tenant_id: [Required] ID of the tenant to which the service principal belongs. Required. :vartype tenant_id: str - :ivar thumbprint: Required. [Required] Thumbprint of the certificate used for authentication. + :ivar thumbprint: [Required] Thumbprint of the certificate used for authentication. Required. :vartype thumbprint: str """ _validation = { - 'credentials_type': {'required': True}, - 'client_id': {'required': True}, - 'secrets': {'required': True}, - 'tenant_id': {'required': True}, - 'thumbprint': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "credentials_type": {"required": True}, + "client_id": {"required": True}, + "secrets": {"required": True}, + "tenant_id": {"required": True}, + "thumbprint": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'authority_url': {'key': 'authorityUrl', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'resource_url': {'key': 'resourceUrl', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'CertificateDatastoreSecrets'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'thumbprint': {'key': 'thumbprint', 'type': 'str'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, + "authority_url": {"key": "authorityUrl", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + "resource_url": {"key": "resourceUrl", "type": "str"}, + "secrets": {"key": "secrets", "type": "CertificateDatastoreSecrets"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "thumbprint": {"key": "thumbprint", "type": "str"}, } def __init__( self, *, client_id: str, - secrets: "CertificateDatastoreSecrets", + secrets: "_models.CertificateDatastoreSecrets", tenant_id: str, thumbprint: str, authority_url: Optional[str] = None, resource_url: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword authority_url: Authority URL used for authentication. :paramtype authority_url: str - :keyword client_id: Required. [Required] Service principal client ID. + :keyword client_id: [Required] Service principal client ID. Required. :paramtype client_id: str :keyword resource_url: Resource the service principal has access to. :paramtype resource_url: str - :keyword secrets: Required. [Required] Service principal secrets. + :keyword secrets: [Required] Service principal secrets. Required. :paramtype secrets: ~azure.mgmt.machinelearningservices.models.CertificateDatastoreSecrets - :keyword tenant_id: Required. [Required] ID of the tenant to which the service principal - belongs. + :keyword tenant_id: [Required] ID of the tenant to which the service principal belongs. + Required. :paramtype tenant_id: str - :keyword thumbprint: Required. [Required] Thumbprint of the certificate used for - authentication. + :keyword thumbprint: [Required] Thumbprint of the certificate used for authentication. 
+ Required. :paramtype thumbprint: str """ - super(CertificateDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'Certificate' # type: str + super().__init__(**kwargs) + self.credentials_type: str = "Certificate" self.authority_url = authority_url self.client_id = client_id self.resource_url = resource_url @@ -5358,40 +5268,36 @@ class CertificateDatastoreSecrets(DatastoreSecrets): All required parameters must be populated in order to send to Azure. - :ivar secrets_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", "Sas", - "ServicePrincipal", "KerberosPassword", "KerberosKeytab". + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType :ivar certificate: Service principal certificate. :vartype certificate: str """ _validation = { - 'secrets_type': {'required': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'certificate': {'key': 'certificate', 'type': 'str'}, + "secrets_type": {"key": "secretsType", "type": "str"}, + "certificate": {"key": "certificate", "type": "str"}, } - def __init__( - self, - *, - certificate: Optional[str] = None, - **kwargs - ): + def __init__(self, *, certificate: Optional[str] = None, **kwargs: Any) -> None: """ :keyword certificate: Service principal certificate. :paramtype certificate: str """ - super(CertificateDatastoreSecrets, self).__init__(**kwargs) - self.secrets_type = 'Certificate' # type: str + super().__init__(**kwargs) + self.secrets_type: str = "Certificate" self.certificate = certificate -class TableVertical(msrest.serialization.Model): - """Abstract class for AutoML tasks that use table dataset as input - such as Classification/Regression/Forecasting. +class TableVertical(_serialization.Model): # pylint: disable=too-many-instance-attributes + """Abstract class for AutoML tasks that use table dataset as input - such as + Classification/Regression/Forecasting. :ivar cv_split_column_names: Columns to use for CVSplit data. 
:vartype cv_split_column_names: list[str] @@ -5431,37 +5337,37 @@ class TableVertical(msrest.serialization.Model): """ _attribute_map = { - 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'TableFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, - 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, - 'search_space': {'key': 'searchSpace', 'type': '[TableParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'TableSweepSettings'}, - 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, - 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, + "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, + "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, + "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, + "test_data": {"key": "testData", "type": "MLTableJobInput"}, + "test_data_size": {"key": "testDataSize", "type": "float"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "weight_column_name": {"key": "weightColumnName", "type": "str"}, } def __init__( self, *, cv_split_column_names: Optional[List[str]] = None, - featurization_settings: Optional["TableVerticalFeaturizationSettings"] = None, - fixed_parameters: Optional["TableFixedParameters"] = None, - limit_settings: Optional["TableVerticalLimitSettings"] = None, - n_cross_validations: Optional["NCrossValidations"] = None, - search_space: Optional[List["TableParameterSubspace"]] = None, - sweep_settings: Optional["TableSweepSettings"] = None, - test_data: Optional["MLTableJobInput"] = None, + featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.TableFixedParameters"] = None, + limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, + n_cross_validations: Optional["_models.NCrossValidations"] = None, + search_space: Optional[List["_models.TableParameterSubspace"]] = None, + sweep_settings: Optional["_models.TableSweepSettings"] = None, + test_data: Optional["_models.MLTableJobInput"] = None, test_data_size: Optional[float] = None, - validation_data: Optional["MLTableJobInput"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, validation_data_size: Optional[float] = None, weight_column_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword cv_split_column_names: Columns to use for CVSplit data. :paramtype cv_split_column_names: list[str] @@ -5502,7 +5408,7 @@ def __init__( weighted column as an input, causing rows in the data to be weighted up or down. 
:paramtype weight_column_name: str """ - super(TableVertical, self).__init__(**kwargs) + super().__init__(**kwargs) self.cv_split_column_names = cv_split_column_names self.featurization_settings = featurization_settings self.fixed_parameters = fixed_parameters @@ -5517,11 +5423,24 @@ def __init__( self.weight_column_name = weight_column_name -class Classification(AutoMLVertical, TableVertical): +class Classification(TableVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes """Classification task in AutoML Table vertical. All required parameters must be populated in order to send to Azure. + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar cv_split_column_names: Columns to use for CVSplit data. :vartype cv_split_column_names: list[str] :ivar featurization_settings: Featurization inputs needed for AutoML job. @@ -5557,23 +5476,10 @@ class Classification(AutoMLVertical, TableVertical): :ivar weight_column_name: The name of the sample weight column. Automated ML supports a weighted column as an input, causing rows in the data to be weighted up or down. :vartype weight_column_name: str - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar positive_label: Positive label for binary metrics calculation. :vartype positive_label: str - :ivar primary_metric: Primary metric for the task. Possible values include: "AUCWeighted", - "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted". + :ivar primary_metric: Primary metric for the task. Known values are: "AUCWeighted", "Accuracy", + "NormMacroRecall", "AveragePrecisionScoreWeighted", and "PrecisionScoreWeighted". 
:vartype primary_metric: str or ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics :ivar training_settings: Inputs for training phase for an AutoML Job. @@ -5582,56 +5488,64 @@ class Classification(AutoMLVertical, TableVertical): """ _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'TableFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, - 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, - 'search_space': {'key': 'searchSpace', 'type': '[TableParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'TableSweepSettings'}, - 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, - 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'positive_label': {'key': 'positiveLabel', 'type': 'str'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - 'training_settings': {'key': 'trainingSettings', 'type': 'ClassificationTrainingSettings'}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, + "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, + "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, + "test_data": {"key": "testData", "type": "MLTableJobInput"}, + "test_data_size": {"key": "testDataSize", "type": "float"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "weight_column_name": {"key": "weightColumnName", "type": "str"}, + "positive_label": {"key": "positiveLabel", "type": "str"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + "training_settings": {"key": "trainingSettings", "type": "ClassificationTrainingSettings"}, } def __init__( self, *, - training_data: "MLTableJobInput", + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, cv_split_column_names: Optional[List[str]] = None, - featurization_settings: 
Optional["TableVerticalFeaturizationSettings"] = None, - fixed_parameters: Optional["TableFixedParameters"] = None, - limit_settings: Optional["TableVerticalLimitSettings"] = None, - n_cross_validations: Optional["NCrossValidations"] = None, - search_space: Optional[List["TableParameterSubspace"]] = None, - sweep_settings: Optional["TableSweepSettings"] = None, - test_data: Optional["MLTableJobInput"] = None, + featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.TableFixedParameters"] = None, + limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, + n_cross_validations: Optional["_models.NCrossValidations"] = None, + search_space: Optional[List["_models.TableParameterSubspace"]] = None, + sweep_settings: Optional["_models.TableSweepSettings"] = None, + test_data: Optional["_models.MLTableJobInput"] = None, test_data_size: Optional[float] = None, - validation_data: Optional["MLTableJobInput"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, validation_data_size: Optional[float] = None, weight_column_name: Optional[str] = None, - log_verbosity: Optional[Union[str, "LogVerbosity"]] = None, - target_column_name: Optional[str] = None, positive_label: Optional[str] = None, - primary_metric: Optional[Union[str, "ClassificationPrimaryMetrics"]] = None, - training_settings: Optional["ClassificationTrainingSettings"] = None, - **kwargs - ): + primary_metric: Optional[Union[str, "_models.ClassificationPrimaryMetrics"]] = None, + training_settings: Optional["_models.ClassificationTrainingSettings"] = None, + **kwargs: Any + ) -> None: """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :keyword cv_split_column_names: Columns to use for CVSplit data. :paramtype cv_split_column_names: list[str] :keyword featurization_settings: Featurization inputs needed for AutoML job. @@ -5670,25 +5584,41 @@ def __init__( :keyword weight_column_name: The name of the sample weight column. Automated ML supports a weighted column as an input, causing rows in the data to be weighted up or down. :paramtype weight_column_name: str - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :keyword positive_label: Positive label for binary metrics calculation. :paramtype positive_label: str - :keyword primary_metric: Primary metric for the task. Possible values include: "AUCWeighted", - "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted". 
+ :keyword primary_metric: Primary metric for the task. Known values are: "AUCWeighted", + "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and "PrecisionScoreWeighted". :paramtype primary_metric: str or ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics :keyword training_settings: Inputs for training phase for an AutoML Job. :paramtype training_settings: ~azure.mgmt.machinelearningservices.models.ClassificationTrainingSettings """ - super(Classification, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, cv_split_column_names=cv_split_column_names, featurization_settings=featurization_settings, fixed_parameters=fixed_parameters, limit_settings=limit_settings, n_cross_validations=n_cross_validations, search_space=search_space, sweep_settings=sweep_settings, test_data=test_data, test_data_size=test_data_size, validation_data=validation_data, validation_data_size=validation_data_size, weight_column_name=weight_column_name, **kwargs) + super().__init__( + cv_split_column_names=cv_split_column_names, + featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, + limit_settings=limit_settings, + n_cross_validations=n_cross_validations, + search_space=search_space, + sweep_settings=sweep_settings, + test_data=test_data, + test_data_size=test_data_size, + validation_data=validation_data, + validation_data_size=validation_data_size, + weight_column_name=weight_column_name, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type: str = "Classification" + self.training_data = training_data + self.positive_label = positive_label + self.primary_metric = primary_metric + self.training_settings = training_settings self.cv_split_column_names = cv_split_column_names self.featurization_settings = featurization_settings self.fixed_parameters = fixed_parameters @@ -5701,25 +5631,18 @@ def __init__( self.validation_data = validation_data self.validation_data_size = validation_data_size self.weight_column_name = weight_column_name - self.task_type = 'Classification' # type: str - self.positive_label = positive_label - self.primary_metric = primary_metric - self.training_settings = training_settings - self.log_verbosity = log_verbosity - self.target_column_name = target_column_name - self.training_data = training_data -class ModelPerformanceMetricThresholdBase(msrest.serialization.Model): +class ModelPerformanceMetricThresholdBase(_serialization.Model): """ModelPerformanceMetricThresholdBase. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: ClassificationModelPerformanceMetricThreshold, RegressionModelPerformanceMetricThreshold. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + ClassificationModelPerformanceMetricThreshold, RegressionModelPerformanceMetricThreshold All required parameters must be populated in order to send to Azure. - :ivar model_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Classification", "Regression". + :ivar model_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Classification" and "Regression". 
:vartype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType :ivar threshold: The threshold value. If null, a default value will be set depending on the selected metric. @@ -5727,31 +5650,29 @@ class ModelPerformanceMetricThresholdBase(msrest.serialization.Model): """ _validation = { - 'model_type': {'required': True}, + "model_type": {"required": True}, } _attribute_map = { - 'model_type': {'key': 'modelType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "model_type": {"key": "modelType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } _subtype_map = { - 'model_type': {'Classification': 'ClassificationModelPerformanceMetricThreshold', 'Regression': 'RegressionModelPerformanceMetricThreshold'} + "model_type": { + "Classification": "ClassificationModelPerformanceMetricThreshold", + "Regression": "RegressionModelPerformanceMetricThreshold", + } } - def __init__( - self, - *, - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + def __init__(self, *, threshold: Optional["_models.MonitoringThreshold"] = None, **kwargs: Any) -> None: """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ - super(ModelPerformanceMetricThresholdBase, self).__init__(**kwargs) - self.model_type = None # type: Optional[str] + super().__init__(**kwargs) + self.model_type: Optional[str] = None self.threshold = threshold @@ -5760,51 +5681,51 @@ class ClassificationModelPerformanceMetricThreshold(ModelPerformanceMetricThresh All required parameters must be populated in order to send to Azure. - :ivar model_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Classification", "Regression". + :ivar model_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Classification" and "Regression". :vartype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType :ivar threshold: The threshold value. If null, a default value will be set depending on the selected metric. :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. [Required] The classification model performance to calculate. Possible - values include: "Accuracy", "Precision", "Recall". + :ivar metric: [Required] The classification model performance to calculate. Required. Known + values are: "Accuracy", "Precision", and "Recall". 
:vartype metric: str or ~azure.mgmt.machinelearningservices.models.ClassificationModelPerformanceMetric """ _validation = { - 'model_type': {'required': True}, - 'metric': {'required': True}, + "model_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'model_type': {'key': 'modelType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "model_type": {"key": "modelType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } def __init__( self, *, - metric: Union[str, "ClassificationModelPerformanceMetric"], - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + metric: Union[str, "_models.ClassificationModelPerformanceMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. [Required] The classification model performance to calculate. - Possible values include: "Accuracy", "Precision", "Recall". + :keyword metric: [Required] The classification model performance to calculate. Required. Known + values are: "Accuracy", "Precision", and "Recall". :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.ClassificationModelPerformanceMetric """ - super(ClassificationModelPerformanceMetricThreshold, self).__init__(threshold=threshold, **kwargs) - self.model_type = 'Classification' # type: str + super().__init__(threshold=threshold, **kwargs) + self.model_type: str = "Classification" self.metric = metric -class TrainingSettings(msrest.serialization.Model): +class TrainingSettings(_serialization.Model): """Training related configuration. :ivar enable_dnn_training: Enable recommendation of DNN models. @@ -5829,35 +5750,35 @@ class TrainingSettings(msrest.serialization.Model): mode selection. Default is 'auto'. If 'Distributed' then only distributed featurization is used and distributed algorithms are chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". 
:vartype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode """ _attribute_map = { - 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, - 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, - 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, - 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, - 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, - 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, - 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, - 'training_mode': {'key': 'trainingMode', 'type': 'str'}, + "enable_dnn_training": {"key": "enableDnnTraining", "type": "bool"}, + "enable_model_explainability": {"key": "enableModelExplainability", "type": "bool"}, + "enable_onnx_compatible_models": {"key": "enableOnnxCompatibleModels", "type": "bool"}, + "enable_stack_ensemble": {"key": "enableStackEnsemble", "type": "bool"}, + "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, + "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, + "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "training_mode": {"key": "trainingMode", "type": "str"}, } def __init__( self, *, - enable_dnn_training: Optional[bool] = False, - enable_model_explainability: Optional[bool] = True, - enable_onnx_compatible_models: Optional[bool] = False, - enable_stack_ensemble: Optional[bool] = True, - enable_vote_ensemble: Optional[bool] = True, - ensemble_model_download_timeout: Optional[datetime.timedelta] = "PT5M", - stack_ensemble_settings: Optional["StackEnsembleSettings"] = None, - training_mode: Optional[Union[str, "TrainingMode"]] = None, - **kwargs - ): + enable_dnn_training: bool = False, + enable_model_explainability: bool = True, + enable_onnx_compatible_models: bool = False, + enable_stack_ensemble: bool = True, + enable_vote_ensemble: bool = True, + ensemble_model_download_timeout: datetime.timedelta = "PT5M", + stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + training_mode: Optional[Union[str, "_models.TrainingMode"]] = None, + **kwargs: Any + ) -> None: """ :keyword enable_dnn_training: Enable recommendation of DNN models. :paramtype enable_dnn_training: bool @@ -5881,11 +5802,11 @@ def __init__( mode selection. Default is 'auto'. If 'Distributed' then only distributed featurization is used and distributed algorithms are chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". :paramtype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode """ - super(TrainingSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.enable_dnn_training = enable_dnn_training self.enable_model_explainability = enable_model_explainability self.enable_onnx_compatible_models = enable_onnx_compatible_models @@ -5921,8 +5842,8 @@ class ClassificationTrainingSettings(TrainingSettings): mode selection. Default is 'auto'. If 'Distributed' then only distributed featurization is used and distributed algorithms are chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. 
Possible values include: - "Auto", "Distributed", "NonDistributed". + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". :vartype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode :ivar allowed_training_algorithms: Allowed models for classification task. :vartype allowed_training_algorithms: list[str or @@ -5933,33 +5854,33 @@ class ClassificationTrainingSettings(TrainingSettings): """ _attribute_map = { - 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, - 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, - 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, - 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, - 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, - 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, - 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, - 'training_mode': {'key': 'trainingMode', 'type': 'str'}, - 'allowed_training_algorithms': {'key': 'allowedTrainingAlgorithms', 'type': '[str]'}, - 'blocked_training_algorithms': {'key': 'blockedTrainingAlgorithms', 'type': '[str]'}, + "enable_dnn_training": {"key": "enableDnnTraining", "type": "bool"}, + "enable_model_explainability": {"key": "enableModelExplainability", "type": "bool"}, + "enable_onnx_compatible_models": {"key": "enableOnnxCompatibleModels", "type": "bool"}, + "enable_stack_ensemble": {"key": "enableStackEnsemble", "type": "bool"}, + "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, + "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, + "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "training_mode": {"key": "trainingMode", "type": "str"}, + "allowed_training_algorithms": {"key": "allowedTrainingAlgorithms", "type": "[str]"}, + "blocked_training_algorithms": {"key": "blockedTrainingAlgorithms", "type": "[str]"}, } def __init__( self, *, - enable_dnn_training: Optional[bool] = False, - enable_model_explainability: Optional[bool] = True, - enable_onnx_compatible_models: Optional[bool] = False, - enable_stack_ensemble: Optional[bool] = True, - enable_vote_ensemble: Optional[bool] = True, - ensemble_model_download_timeout: Optional[datetime.timedelta] = "PT5M", - stack_ensemble_settings: Optional["StackEnsembleSettings"] = None, - training_mode: Optional[Union[str, "TrainingMode"]] = None, - allowed_training_algorithms: Optional[List[Union[str, "ClassificationModels"]]] = None, - blocked_training_algorithms: Optional[List[Union[str, "ClassificationModels"]]] = None, - **kwargs - ): + enable_dnn_training: bool = False, + enable_model_explainability: bool = True, + enable_onnx_compatible_models: bool = False, + enable_stack_ensemble: bool = True, + enable_vote_ensemble: bool = True, + ensemble_model_download_timeout: datetime.timedelta = "PT5M", + stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + training_mode: Optional[Union[str, "_models.TrainingMode"]] = None, + allowed_training_algorithms: Optional[List[Union[str, "_models.ClassificationModels"]]] = None, + blocked_training_algorithms: Optional[List[Union[str, "_models.ClassificationModels"]]] = None, + **kwargs: Any + ) -> None: """ :keyword enable_dnn_training: Enable recommendation of DNN 
models. :paramtype enable_dnn_training: bool @@ -5983,8 +5904,8 @@ def __init__( mode selection. Default is 'auto'. If 'Distributed' then only distributed featurization is used and distributed algorithms are chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". :paramtype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode :keyword allowed_training_algorithms: Allowed models for classification task. :paramtype allowed_training_algorithms: list[str or @@ -5993,12 +5914,22 @@ def __init__( :paramtype blocked_training_algorithms: list[str or ~azure.mgmt.machinelearningservices.models.ClassificationModels] """ - super(ClassificationTrainingSettings, self).__init__(enable_dnn_training=enable_dnn_training, enable_model_explainability=enable_model_explainability, enable_onnx_compatible_models=enable_onnx_compatible_models, enable_stack_ensemble=enable_stack_ensemble, enable_vote_ensemble=enable_vote_ensemble, ensemble_model_download_timeout=ensemble_model_download_timeout, stack_ensemble_settings=stack_ensemble_settings, training_mode=training_mode, **kwargs) + super().__init__( + enable_dnn_training=enable_dnn_training, + enable_model_explainability=enable_model_explainability, + enable_onnx_compatible_models=enable_onnx_compatible_models, + enable_stack_ensemble=enable_stack_ensemble, + enable_vote_ensemble=enable_vote_ensemble, + ensemble_model_download_timeout=ensemble_model_download_timeout, + stack_ensemble_settings=stack_ensemble_settings, + training_mode=training_mode, + **kwargs + ) self.allowed_training_algorithms = allowed_training_algorithms self.blocked_training_algorithms = blocked_training_algorithms -class ClusterUpdateParameters(msrest.serialization.Model): +class ClusterUpdateParameters(_serialization.Model): """AmlCompute update parameters. :ivar properties: Properties of ClusterUpdate. @@ -6006,28 +5937,23 @@ class ClusterUpdateParameters(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties.properties', 'type': 'ScaleSettingsInformation'}, + "properties": {"key": "properties.properties", "type": "ScaleSettingsInformation"}, } - def __init__( - self, - *, - properties: Optional["ScaleSettingsInformation"] = None, - **kwargs - ): + def __init__(self, *, properties: Optional["_models.ScaleSettingsInformation"] = None, **kwargs: Any) -> None: """ :keyword properties: Properties of ClusterUpdate. :paramtype properties: ~azure.mgmt.machinelearningservices.models.ScaleSettingsInformation """ - super(ClusterUpdateParameters, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class ExportSummary(msrest.serialization.Model): +class ExportSummary(_serialization.Model): """ExportSummary. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CsvExportSummary, CocoExportSummary, DatasetExportSummary. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CsvExportSummary, CocoExportSummary, DatasetExportSummary Variables are only populated by the server, and will be ignored when sending a request. @@ -6036,9 +5962,9 @@ class ExportSummary(msrest.serialization.Model): :ivar end_date_time: The time when the export was completed. 
:vartype end_date_time: ~datetime.datetime :ivar exported_row_count: The total number of labeled datapoints exported. - :vartype exported_row_count: long - :ivar format: Required. [Required] The format of exported labels, also as the - discriminator.Constant filled by server. Possible values include: "Dataset", "Coco", "CSV". + :vartype exported_row_count: int + :ivar format: [Required] The format of exported labels, also as the discriminator. Required. + Known values are: "Dataset", "Coco", and "CSV". :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType :ivar labeling_job_id: Name and identifier of the job containing exported labels. :vartype labeling_job_id: str @@ -6047,35 +5973,31 @@ class ExportSummary(msrest.serialization.Model): """ _validation = { - 'end_date_time': {'readonly': True}, - 'exported_row_count': {'readonly': True}, - 'format': {'required': True}, - 'labeling_job_id': {'readonly': True}, - 'start_date_time': {'readonly': True}, + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, } _attribute_map = { - 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, - 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, - 'format': {'key': 'format', 'type': 'str'}, - 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, - 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "int"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, } _subtype_map = { - 'format': {'CSV': 'CsvExportSummary', 'Coco': 'CocoExportSummary', 'Dataset': 'DatasetExportSummary'} + "format": {"CSV": "CsvExportSummary", "Coco": "CocoExportSummary", "Dataset": "DatasetExportSummary"} } - def __init__( - self, - **kwargs - ): - """ - """ - super(ExportSummary, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.end_date_time = None self.exported_row_count = None - self.format = None # type: Optional[str] + self.format: Optional[str] = None self.labeling_job_id = None self.start_date_time = None @@ -6090,9 +6012,9 @@ class CocoExportSummary(ExportSummary): :ivar end_date_time: The time when the export was completed. :vartype end_date_time: ~datetime.datetime :ivar exported_row_count: The total number of labeled datapoints exported. - :vartype exported_row_count: long - :ivar format: Required. [Required] The format of exported labels, also as the - discriminator.Constant filled by server. Possible values include: "Dataset", "Coco", "CSV". + :vartype exported_row_count: int + :ivar format: [Required] The format of exported labels, also as the discriminator. Required. + Known values are: "Dataset", "Coco", and "CSV". :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType :ivar labeling_job_id: Name and identifier of the job containing exported labels. 
:vartype labeling_job_id: str @@ -6105,71 +6027,61 @@ class CocoExportSummary(ExportSummary): """ _validation = { - 'end_date_time': {'readonly': True}, - 'exported_row_count': {'readonly': True}, - 'format': {'required': True}, - 'labeling_job_id': {'readonly': True}, - 'start_date_time': {'readonly': True}, - 'container_name': {'readonly': True}, - 'snapshot_path': {'readonly': True}, + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, + "container_name": {"readonly": True}, + "snapshot_path": {"readonly": True}, } _attribute_map = { - 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, - 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, - 'format': {'key': 'format', 'type': 'str'}, - 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, - 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, - 'container_name': {'key': 'containerName', 'type': 'str'}, - 'snapshot_path': {'key': 'snapshotPath', 'type': 'str'}, + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "int"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, + "container_name": {"key": "containerName", "type": "str"}, + "snapshot_path": {"key": "snapshotPath", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(CocoExportSummary, self).__init__(**kwargs) - self.format = 'Coco' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.format: str = "Coco" self.container_name = None self.snapshot_path = None -class CodeConfiguration(msrest.serialization.Model): +class CodeConfiguration(_serialization.Model): """Configuration for a scoring code asset. All required parameters must be populated in order to send to Azure. :ivar code_id: ARM resource ID of the code asset. :vartype code_id: str - :ivar scoring_script: Required. [Required] The script to execute on startup. eg. "score.py". + :ivar scoring_script: [Required] The script to execute on startup. eg. "score.py". Required. :vartype scoring_script: str """ _validation = { - 'scoring_script': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "scoring_script": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'code_id': {'key': 'codeId', 'type': 'str'}, - 'scoring_script': {'key': 'scoringScript', 'type': 'str'}, + "code_id": {"key": "codeId", "type": "str"}, + "scoring_script": {"key": "scoringScript", "type": "str"}, } - def __init__( - self, - *, - scoring_script: str, - code_id: Optional[str] = None, - **kwargs - ): + def __init__(self, *, scoring_script: str, code_id: Optional[str] = None, **kwargs: Any) -> None: """ :keyword code_id: ARM resource ID of the code asset. :paramtype code_id: str - :keyword scoring_script: Required. [Required] The script to execute on startup. eg. "score.py". + :keyword scoring_script: [Required] The script to execute on startup. eg. "score.py". Required. 
:paramtype scoring_script: str """ - super(CodeConfiguration, self).__init__(**kwargs) + super().__init__(**kwargs) self.code_id = code_id self.scoring_script = scoring_script @@ -6192,37 +6104,32 @@ class CodeContainer(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.CodeContainerProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'CodeContainerProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "CodeContainerProperties"}, } - def __init__( - self, - *, - properties: "CodeContainerProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.CodeContainerProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.CodeContainerProperties """ - super(CodeContainer, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties @@ -6235,7 +6142,7 @@ class CodeContainerProperties(AssetContainer): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar is_archived: Is the asset archived?. :vartype is_archived: bool @@ -6243,26 +6150,26 @@ class CodeContainerProperties(AssetContainer): :vartype latest_version: str :ivar next_version: The next auto incremental version. :vartype next_version: str - :ivar provisioning_state: Provisioning state for the code container. Possible values include: - "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". + :ivar provisioning_state: Provisioning state for the code container. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". 
:vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.AssetProvisioningState """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } def __init__( @@ -6271,24 +6178,24 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - is_archived: Optional[bool] = False, - **kwargs - ): + is_archived: bool = False, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword is_archived: Is the asset archived?. :paramtype is_archived: bool """ - super(CodeContainerProperties, self).__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) self.provisioning_state = None -class CodeContainerResourceArmPaginatedResult(msrest.serialization.Model): +class CodeContainerResourceArmPaginatedResult(_serialization.Model): """A paginated list of CodeContainer entities. :ivar next_link: The link to the next page of CodeContainer objects. If null, there are no @@ -6299,17 +6206,13 @@ class CodeContainerResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[CodeContainer]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[CodeContainer]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["CodeContainer"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.CodeContainer"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of CodeContainer objects. If null, there are no additional pages. @@ -6317,7 +6220,7 @@ def __init__( :keyword value: An array of objects of type CodeContainer. 
:paramtype value: list[~azure.mgmt.machinelearningservices.models.CodeContainer] """ - super(CodeContainerResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value @@ -6340,37 +6243,32 @@ class CodeVersion(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.CodeVersionProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'CodeVersionProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "CodeVersionProperties"}, } - def __init__( - self, - *, - properties: "CodeVersionProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.CodeVersionProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.CodeVersionProperties """ - super(CodeVersion, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties @@ -6383,7 +6281,7 @@ class CodeVersionProperties(AssetBase): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -6395,25 +6293,25 @@ class CodeVersionProperties(AssetBase): :vartype is_archived: bool :ivar code_uri: Uri where code is located. :vartype code_uri: str - :ivar provisioning_state: Provisioning state for the code version. Possible values include: - "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". + :ivar provisioning_state: Provisioning state for the code version. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". 
:vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.AssetProvisioningState """ _validation = { - 'provisioning_state': {'readonly': True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'code_uri': {'key': 'codeUri', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "code_uri": {"key": "codeUri", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } def __init__( @@ -6422,18 +6320,18 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - is_anonymous: Optional[bool] = False, - is_archived: Optional[bool] = False, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, code_uri: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -6446,12 +6344,20 @@ def __init__( :keyword code_uri: Uri where code is located. :paramtype code_uri: str """ - super(CodeVersionProperties, self).__init__(description=description, properties=properties, tags=tags, auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs) + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) self.code_uri = code_uri self.provisioning_state = None -class CodeVersionResourceArmPaginatedResult(msrest.serialization.Model): +class CodeVersionResourceArmPaginatedResult(_serialization.Model): """A paginated list of CodeVersion entities. :ivar next_link: The link to the next page of CodeVersion objects. 
If null, there are no @@ -6462,17 +6368,13 @@ class CodeVersionResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[CodeVersion]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[CodeVersion]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["CodeVersion"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.CodeVersion"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of CodeVersion objects. If null, there are no additional pages. @@ -6480,19 +6382,19 @@ def __init__( :keyword value: An array of objects of type CodeVersion. :paramtype value: list[~azure.mgmt.machinelearningservices.models.CodeVersion] """ - super(CodeVersionResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class Collection(msrest.serialization.Model): +class Collection(_serialization.Model): """Collection. :ivar client_id: The msi client id used to collect logging to blob storage. If it's null,backend will pick a registered endpoint identity to auth. :vartype client_id: str - :ivar data_collection_mode: Enable or disable data collection. Possible values include: - "Enabled", "Disabled". + :ivar data_collection_mode: Enable or disable data collection. Known values are: "Enabled" and + "Disabled". :vartype data_collection_mode: str or ~azure.mgmt.machinelearningservices.models.DataCollectionMode :ivar data_id: The data asset arm resource id. Client side will ensure data asset is pointing @@ -6504,27 +6406,27 @@ class Collection(msrest.serialization.Model): """ _attribute_map = { - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'data_collection_mode': {'key': 'dataCollectionMode', 'type': 'str'}, - 'data_id': {'key': 'dataId', 'type': 'str'}, - 'sampling_rate': {'key': 'samplingRate', 'type': 'float'}, + "client_id": {"key": "clientId", "type": "str"}, + "data_collection_mode": {"key": "dataCollectionMode", "type": "str"}, + "data_id": {"key": "dataId", "type": "str"}, + "sampling_rate": {"key": "samplingRate", "type": "float"}, } def __init__( self, *, client_id: Optional[str] = None, - data_collection_mode: Optional[Union[str, "DataCollectionMode"]] = None, + data_collection_mode: Optional[Union[str, "_models.DataCollectionMode"]] = None, data_id: Optional[str] = None, - sampling_rate: Optional[float] = 1, - **kwargs - ): + sampling_rate: float = 1, + **kwargs: Any + ) -> None: """ :keyword client_id: The msi client id used to collect logging to blob storage. If it's null,backend will pick a registered endpoint identity to auth. :paramtype client_id: str - :keyword data_collection_mode: Enable or disable data collection. Possible values include: - "Enabled", "Disabled". + :keyword data_collection_mode: Enable or disable data collection. Known values are: "Enabled" + and "Disabled". :paramtype data_collection_mode: str or ~azure.mgmt.machinelearningservices.models.DataCollectionMode :keyword data_id: The data asset arm resource id. Client side will ensure data asset is @@ -6534,48 +6436,42 @@ def __init__( 100% of data by default. 
:paramtype sampling_rate: float """ - super(Collection, self).__init__(**kwargs) + super().__init__(**kwargs) self.client_id = client_id self.data_collection_mode = data_collection_mode self.data_id = data_id self.sampling_rate = sampling_rate -class ColumnTransformer(msrest.serialization.Model): +class ColumnTransformer(_serialization.Model): """Column transformer parameters. :ivar fields: Fields to apply transformer logic on. :vartype fields: list[str] :ivar parameters: Different properties to be passed to transformer. Input expected is dictionary of key,value pairs in JSON format. - :vartype parameters: any + :vartype parameters: JSON """ _attribute_map = { - 'fields': {'key': 'fields', 'type': '[str]'}, - 'parameters': {'key': 'parameters', 'type': 'object'}, + "fields": {"key": "fields", "type": "[str]"}, + "parameters": {"key": "parameters", "type": "object"}, } - def __init__( - self, - *, - fields: Optional[List[str]] = None, - parameters: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, fields: Optional[List[str]] = None, parameters: Optional[JSON] = None, **kwargs: Any) -> None: """ :keyword fields: Fields to apply transformer logic on. :paramtype fields: list[str] :keyword parameters: Different properties to be passed to transformer. Input expected is dictionary of key,value pairs in JSON format. - :paramtype parameters: any + :paramtype parameters: JSON """ - super(ColumnTransformer, self).__init__(**kwargs) + super().__init__(**kwargs) self.fields = fields self.parameters = parameters -class CommandJob(JobBaseProperties): +class CommandJob(JobBaseProperties): # pylint: disable=too-many-instance-attributes """Command job definition. Variables are only populated by the server, and will be ignored when sending a request. @@ -6586,7 +6482,7 @@ class CommandJob(JobBaseProperties): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar component_id: ARM resource ID of the component resource. :vartype component_id: str @@ -6603,8 +6499,8 @@ class CommandJob(JobBaseProperties): :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration :ivar is_archived: Is the asset archived?. :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". + :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType :ivar notification_setting: Notification setting for the job. :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting @@ -6614,23 +6510,23 @@ class CommandJob(JobBaseProperties): :ivar services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". 
+ :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus :ivar autologger_settings: Distribution configuration of the job. If set, this should be one of Mpi, Tensorflow, PyTorch, or null. :vartype autologger_settings: ~azure.mgmt.machinelearningservices.models.AutologgerSettings :ivar code_id: ARM resource ID of the code asset. :vartype code_id: str - :ivar command: Required. [Required] The command to execute on startup of the job. eg. "python - train.py". + :ivar command: [Required] The command to execute on startup of the job. eg. "python train.py". + Required. :vartype command: str :ivar distribution: Distribution configuration of the job. If set, this should be one of Mpi, Tensorflow, PyTorch, Ray, or null. :vartype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration - :ivar environment_id: Required. [Required] The ARM resource ID of the Environment specification - for the job. + :ivar environment_id: [Required] The ARM resource ID of the Environment specification for the + job. Required. :vartype environment_id: str :ivar environment_variables: Environment variables included in the job. :vartype environment_variables: dict[str, str] @@ -6641,7 +6537,7 @@ class CommandJob(JobBaseProperties): :ivar outputs: Mapping of output data bindings used in the job. :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] :ivar parameters: Input parameters. - :vartype parameters: any + :vartype parameters: JSON :ivar queue_settings: Queue settings for the job. :vartype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings :ivar resources: Compute Resource configuration for the job. 
@@ -6649,43 +6545,43 @@ class CommandJob(JobBaseProperties): """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'environment_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'parameters': {'readonly': True}, + "job_type": {"required": True}, + "status": {"readonly": True}, + "command": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "environment_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "parameters": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'autologger_settings': {'key': 'autologgerSettings', 'type': 'AutologgerSettings'}, - 'code_id': {'key': 'codeId', 'type': 'str'}, - 'command': {'key': 'command', 'type': 'str'}, - 'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'limits': {'key': 'limits', 'type': 'CommandJobLimits'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'parameters': {'key': 'parameters', 'type': 'object'}, - 'queue_settings': {'key': 'queueSettings', 'type': 'QueueSettings'}, - 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "autologger_settings": {"key": "autologgerSettings", "type": "AutologgerSettings"}, + "code_id": {"key": "codeId", "type": "str"}, + "command": {"key": "command", "type": "str"}, + "distribution": {"key": "distribution", "type": "DistributionConfiguration"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + 
"limits": {"key": "limits", "type": "CommandJobLimits"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "parameters": {"key": "parameters", "type": "object"}, + "queue_settings": {"key": "queueSettings", "type": "QueueSettings"}, + "resources": {"key": "resources", "type": "JobResourceConfiguration"}, } - def __init__( + def __init__( # pylint: disable=too-many-locals self, *, command: str, @@ -6696,29 +6592,29 @@ def __init__( component_id: Optional[str] = None, compute_id: Optional[str] = None, display_name: Optional[str] = None, - experiment_name: Optional[str] = "Default", - identity: Optional["IdentityConfiguration"] = None, - is_archived: Optional[bool] = False, - notification_setting: Optional["NotificationSetting"] = None, - secrets_configuration: Optional[Dict[str, "SecretConfiguration"]] = None, - services: Optional[Dict[str, "JobService"]] = None, - autologger_settings: Optional["AutologgerSettings"] = None, + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + notification_setting: Optional["_models.NotificationSetting"] = None, + secrets_configuration: Optional[Dict[str, "_models.SecretConfiguration"]] = None, + services: Optional[Dict[str, "_models.JobService"]] = None, + autologger_settings: Optional["_models.AutologgerSettings"] = None, code_id: Optional[str] = None, - distribution: Optional["DistributionConfiguration"] = None, + distribution: Optional["_models.DistributionConfiguration"] = None, environment_variables: Optional[Dict[str, str]] = None, - inputs: Optional[Dict[str, "JobInput"]] = None, - limits: Optional["CommandJobLimits"] = None, - outputs: Optional[Dict[str, "JobOutput"]] = None, - queue_settings: Optional["QueueSettings"] = None, - resources: Optional["JobResourceConfiguration"] = None, - **kwargs - ): + inputs: Optional[Dict[str, "_models.JobInput"]] = None, + limits: Optional["_models.CommandJobLimits"] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + queue_settings: Optional["_models.QueueSettings"] = None, + resources: Optional["_models.JobResourceConfiguration"] = None, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword component_id: ARM resource ID of the component resource. :paramtype component_id: str @@ -6748,14 +6644,14 @@ def __init__( :paramtype autologger_settings: ~azure.mgmt.machinelearningservices.models.AutologgerSettings :keyword code_id: ARM resource ID of the code asset. :paramtype code_id: str - :keyword command: Required. [Required] The command to execute on startup of the job. eg. - "python train.py". + :keyword command: [Required] The command to execute on startup of the job. eg. "python + train.py". Required. :paramtype command: str :keyword distribution: Distribution configuration of the job. If set, this should be one of Mpi, Tensorflow, PyTorch, Ray, or null. :paramtype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration - :keyword environment_id: Required. [Required] The ARM resource ID of the Environment - specification for the job. + :keyword environment_id: [Required] The ARM resource ID of the Environment specification for + the job. 
Required. :paramtype environment_id: str :keyword environment_variables: Environment variables included in the job. :paramtype environment_variables: dict[str, str] @@ -6770,8 +6666,22 @@ def __init__( :keyword resources: Compute Resource configuration for the job. :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration """ - super(CommandJob, self).__init__(description=description, properties=properties, tags=tags, component_id=component_id, compute_id=compute_id, display_name=display_name, experiment_name=experiment_name, identity=identity, is_archived=is_archived, notification_setting=notification_setting, secrets_configuration=secrets_configuration, services=services, **kwargs) - self.job_type = 'Command' # type: str + super().__init__( + description=description, + properties=properties, + tags=tags, + component_id=component_id, + compute_id=compute_id, + display_name=display_name, + experiment_name=experiment_name, + identity=identity, + is_archived=is_archived, + notification_setting=notification_setting, + secrets_configuration=secrets_configuration, + services=services, + **kwargs + ) + self.job_type: str = "Command" self.autologger_settings = autologger_settings self.code_id = code_id self.command = command @@ -6786,16 +6696,16 @@ def __init__( self.resources = resources -class JobLimits(msrest.serialization.Model): +class JobLimits(_serialization.Model): """JobLimits. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CommandJobLimits, SweepJobLimits. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CommandJobLimits, SweepJobLimits All required parameters must be populated in order to send to Azure. - :ivar job_limits_type: Required. [Required] JobLimit type.Constant filled by server. Possible - values include: "Command", "Sweep". + :ivar job_limits_type: [Required] JobLimit type. Required. Known values are: "Command" and + "Sweep". :vartype job_limits_type: str or ~azure.mgmt.machinelearningservices.models.JobLimitsType :ivar timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. Only supports duration with precision as low as Seconds. @@ -6803,31 +6713,24 @@ class JobLimits(msrest.serialization.Model): """ _validation = { - 'job_limits_type': {'required': True}, + "job_limits_type": {"required": True}, } _attribute_map = { - 'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, + "job_limits_type": {"key": "jobLimitsType", "type": "str"}, + "timeout": {"key": "timeout", "type": "duration"}, } - _subtype_map = { - 'job_limits_type': {'Command': 'CommandJobLimits', 'Sweep': 'SweepJobLimits'} - } + _subtype_map = {"job_limits_type": {"Command": "CommandJobLimits", "Sweep": "SweepJobLimits"}} - def __init__( - self, - *, - timeout: Optional[datetime.timedelta] = None, - **kwargs - ): + def __init__(self, *, timeout: Optional[datetime.timedelta] = None, **kwargs: Any) -> None: """ :keyword timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. Only supports duration with precision as low as Seconds. 
:paramtype timeout: ~datetime.timedelta """ - super(JobLimits, self).__init__(**kwargs) - self.job_limits_type = None # type: Optional[str] + super().__init__(**kwargs) + self.job_limits_type: Optional[str] = None self.timeout = timeout @@ -6836,8 +6739,8 @@ class CommandJobLimits(JobLimits): All required parameters must be populated in order to send to Azure. - :ivar job_limits_type: Required. [Required] JobLimit type.Constant filled by server. Possible - values include: "Command", "Sweep". + :ivar job_limits_type: [Required] JobLimit type. Required. Known values are: "Command" and + "Sweep". :vartype job_limits_type: str or ~azure.mgmt.machinelearningservices.models.JobLimitsType :ivar timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. Only supports duration with precision as low as Seconds. @@ -6845,51 +6748,41 @@ class CommandJobLimits(JobLimits): """ _validation = { - 'job_limits_type': {'required': True}, + "job_limits_type": {"required": True}, } _attribute_map = { - 'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, + "job_limits_type": {"key": "jobLimitsType", "type": "str"}, + "timeout": {"key": "timeout", "type": "duration"}, } - def __init__( - self, - *, - timeout: Optional[datetime.timedelta] = None, - **kwargs - ): + def __init__(self, *, timeout: Optional[datetime.timedelta] = None, **kwargs: Any) -> None: """ :keyword timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. Only supports duration with precision as low as Seconds. :paramtype timeout: ~datetime.timedelta """ - super(CommandJobLimits, self).__init__(timeout=timeout, **kwargs) - self.job_limits_type = 'Command' # type: str + super().__init__(timeout=timeout, **kwargs) + self.job_limits_type: str = "Command" -class ComponentConfiguration(msrest.serialization.Model): +class ComponentConfiguration(_serialization.Model): """Used for sweep over component. :ivar pipeline_settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. - :vartype pipeline_settings: any + :vartype pipeline_settings: JSON """ _attribute_map = { - 'pipeline_settings': {'key': 'pipelineSettings', 'type': 'object'}, + "pipeline_settings": {"key": "pipelineSettings", "type": "object"}, } - def __init__( - self, - *, - pipeline_settings: Optional[Any] = None, - **kwargs - ): + def __init__(self, *, pipeline_settings: Optional[JSON] = None, **kwargs: Any) -> None: """ :keyword pipeline_settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. - :paramtype pipeline_settings: any + :paramtype pipeline_settings: JSON """ - super(ComponentConfiguration, self).__init__(**kwargs) + super().__init__(**kwargs) self.pipeline_settings = pipeline_settings @@ -6911,37 +6804,32 @@ class ComponentContainer(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. 
:vartype properties: ~azure.mgmt.machinelearningservices.models.ComponentContainerProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ComponentContainerProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ComponentContainerProperties"}, } - def __init__( - self, - *, - properties: "ComponentContainerProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.ComponentContainerProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComponentContainerProperties """ - super(ComponentContainer, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties @@ -6949,9 +6837,11 @@ class ComponentContainerProperties(AssetContainer): """Component container definition. -.. raw:: html + .. raw:: html - . + . Variables are only populated by the server, and will be ignored when sending a request. @@ -6959,7 +6849,7 @@ class ComponentContainerProperties(AssetContainer): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar is_archived: Is the asset archived?. :vartype is_archived: bool @@ -6967,26 +6857,26 @@ class ComponentContainerProperties(AssetContainer): :vartype latest_version: str :ivar next_version: The next auto incremental version. :vartype next_version: str - :ivar provisioning_state: Provisioning state for the component container. Possible values - include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". + :ivar provisioning_state: Provisioning state for the component container. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". 
:vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.AssetProvisioningState """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } def __init__( @@ -6995,24 +6885,24 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - is_archived: Optional[bool] = False, - **kwargs - ): + is_archived: bool = False, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword is_archived: Is the asset archived?. :paramtype is_archived: bool """ - super(ComponentContainerProperties, self).__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) self.provisioning_state = None -class ComponentContainerResourceArmPaginatedResult(msrest.serialization.Model): +class ComponentContainerResourceArmPaginatedResult(_serialization.Model): """A paginated list of ComponentContainer entities. :ivar next_link: The link to the next page of ComponentContainer objects. If null, there are no @@ -7023,17 +6913,17 @@ class ComponentContainerResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[ComponentContainer]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ComponentContainer]"}, } def __init__( self, *, next_link: Optional[str] = None, - value: Optional[List["ComponentContainer"]] = None, - **kwargs - ): + value: Optional[List["_models.ComponentContainer"]] = None, + **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of ComponentContainer objects. If null, there are no additional pages. @@ -7041,7 +6931,7 @@ def __init__( :keyword value: An array of objects of type ComponentContainer. 
:paramtype value: list[~azure.mgmt.machinelearningservices.models.ComponentContainer] """ - super(ComponentContainerResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value @@ -7064,37 +6954,32 @@ class ComponentVersion(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.ComponentVersionProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ComponentVersionProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ComponentVersionProperties"}, } - def __init__( - self, - *, - properties: "ComponentVersionProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.ComponentVersionProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComponentVersionProperties """ - super(ComponentVersion, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties @@ -7107,7 +6992,7 @@ class ComponentVersionProperties(AssetBase): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -7118,16 +7003,16 @@ class ComponentVersionProperties(AssetBase): provided it will be used to populate IsArchived. :vartype is_archived: bool :ivar component_spec: Defines Component definition details. - - + + .. raw:: html - + . - :vartype component_spec: any - :ivar provisioning_state: Provisioning state for the component version. Possible values - include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". + :vartype component_spec: JSON + :ivar provisioning_state: Provisioning state for the component version. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.AssetProvisioningState :ivar stage: Stage in the component lifecycle. 
@@ -7135,19 +7020,19 @@ class ComponentVersionProperties(AssetBase): """ _validation = { - 'provisioning_state': {'readonly': True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'component_spec': {'key': 'componentSpec', 'type': 'object'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "component_spec": {"key": "componentSpec", "type": "object"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "stage": {"key": "stage", "type": "str"}, } def __init__( @@ -7156,19 +7041,19 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - is_anonymous: Optional[bool] = False, - is_archived: Optional[bool] = False, - component_spec: Optional[Any] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + component_spec: Optional[JSON] = None, stage: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -7179,24 +7064,32 @@ def __init__( provided it will be used to populate IsArchived. :paramtype is_archived: bool :keyword component_spec: Defines Component definition details. - - + + .. raw:: html - + . - :paramtype component_spec: any + :paramtype component_spec: JSON :keyword stage: Stage in the component lifecycle. :paramtype stage: str """ - super(ComponentVersionProperties, self).__init__(description=description, properties=properties, tags=tags, auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs) + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) self.component_spec = component_spec self.provisioning_state = None self.stage = stage -class ComponentVersionResourceArmPaginatedResult(msrest.serialization.Model): +class ComponentVersionResourceArmPaginatedResult(_serialization.Model): """A paginated list of ComponentVersion entities. :ivar next_link: The link to the next page of ComponentVersion objects. 
If null, there are no @@ -7207,17 +7100,17 @@ class ComponentVersionResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[ComponentVersion]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ComponentVersion]"}, } def __init__( self, *, next_link: Optional[str] = None, - value: Optional[List["ComponentVersion"]] = None, - **kwargs - ): + value: Optional[List["_models.ComponentVersion"]] = None, + **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of ComponentVersion objects. If null, there are no additional pages. @@ -7225,12 +7118,12 @@ def __init__( :keyword value: An array of objects of type ComponentVersion. :paramtype value: list[~azure.mgmt.machinelearningservices.models.ComponentVersion] """ - super(ComponentVersionResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class ComputeInstanceSchema(msrest.serialization.Model): +class ComputeInstanceSchema(_serialization.Model): """Properties(top level) of ComputeInstance. :ivar properties: Properties of ComputeInstance. @@ -7238,24 +7131,19 @@ class ComputeInstanceSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'}, + "properties": {"key": "properties", "type": "ComputeInstanceProperties"}, } - def __init__( - self, - *, - properties: Optional["ComputeInstanceProperties"] = None, - **kwargs - ): + def __init__(self, *, properties: Optional["_models.ComputeInstanceProperties"] = None, **kwargs: Any) -> None: """ :keyword properties: Properties of ComputeInstance. :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties """ - super(ComputeInstanceSchema, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class ComputeInstance(Compute, ComputeInstanceSchema): +class ComputeInstance(Compute, ComputeInstanceSchema): # pylint: disable=too-many-instance-attributes """An Azure Machine Learning compute instance. Variables are only populated by the server, and will be ignored when sending a request. @@ -7264,15 +7152,15 @@ class ComputeInstance(Compute, ComputeInstanceSchema): :ivar properties: Properties of ComputeInstance. :vartype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType :ivar compute_location: Location for the underlying compute. :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. 
Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ProvisioningState :ivar description: The description of the Machine Learning compute. @@ -7294,38 +7182,38 @@ class ComputeInstance(Compute, ComputeInstanceSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'ComputeInstanceProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "ComputeInstanceProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } def __init__( self, *, - properties: Optional["ComputeInstanceProperties"] = None, + properties: Optional["_models.ComputeInstanceProperties"] = None, compute_location: Optional[str] = None, description: Optional[str] = None, resource_id: Optional[str] = None, disable_local_auth: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword properties: Properties of ComputeInstance. :paramtype properties: ~azure.mgmt.machinelearningservices.models.ComputeInstanceProperties @@ -7339,9 +7227,16 @@ def __init__( MSI and AAD exclusively for authentication. 
:paramtype disable_local_auth: bool """ - super(ComputeInstance, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) + super().__init__( + compute_location=compute_location, + description=description, + resource_id=resource_id, + disable_local_auth=disable_local_auth, + properties=properties, + **kwargs + ) self.properties = properties - self.compute_type = 'ComputeInstance' # type: str + self.compute_type: str = "ComputeInstance" self.compute_location = compute_location self.provisioning_state = None self.description = description @@ -7353,7 +7248,7 @@ def __init__( self.disable_local_auth = disable_local_auth -class ComputeInstanceApplication(msrest.serialization.Model): +class ComputeInstanceApplication(_serialization.Model): """Defines an Aml Instance application and its connectivity endpoint URI. :ivar display_name: Name of the ComputeInstance application. @@ -7363,57 +7258,50 @@ class ComputeInstanceApplication(msrest.serialization.Model): """ _attribute_map = { - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'endpoint_uri': {'key': 'endpointUri', 'type': 'str'}, + "display_name": {"key": "displayName", "type": "str"}, + "endpoint_uri": {"key": "endpointUri", "type": "str"}, } def __init__( - self, - *, - display_name: Optional[str] = None, - endpoint_uri: Optional[str] = None, - **kwargs - ): + self, *, display_name: Optional[str] = None, endpoint_uri: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword display_name: Name of the ComputeInstance application. :paramtype display_name: str :keyword endpoint_uri: Application' endpoint URI. :paramtype endpoint_uri: str """ - super(ComputeInstanceApplication, self).__init__(**kwargs) + super().__init__(**kwargs) self.display_name = display_name self.endpoint_uri = endpoint_uri -class ComputeInstanceAutologgerSettings(msrest.serialization.Model): +class ComputeInstanceAutologgerSettings(_serialization.Model): """Specifies settings for autologger. - :ivar mlflow_autologger: Indicates whether mlflow autologger is enabled for notebooks. Possible - values include: "Enabled", "Disabled". + :ivar mlflow_autologger: Indicates whether mlflow autologger is enabled for notebooks. Known + values are: "Enabled" and "Disabled". :vartype mlflow_autologger: str or ~azure.mgmt.machinelearningservices.models.MlflowAutologger """ _attribute_map = { - 'mlflow_autologger': {'key': 'mlflowAutologger', 'type': 'str'}, + "mlflow_autologger": {"key": "mlflowAutologger", "type": "str"}, } def __init__( - self, - *, - mlflow_autologger: Optional[Union[str, "MlflowAutologger"]] = None, - **kwargs - ): + self, *, mlflow_autologger: Optional[Union[str, "_models.MlflowAutologger"]] = None, **kwargs: Any + ) -> None: """ - :keyword mlflow_autologger: Indicates whether mlflow autologger is enabled for notebooks. - Possible values include: "Enabled", "Disabled". + :keyword mlflow_autologger: Indicates whether mlflow autologger is enabled for notebooks. Known + values are: "Enabled" and "Disabled". 
:paramtype mlflow_autologger: str or ~azure.mgmt.machinelearningservices.models.MlflowAutologger """ - super(ComputeInstanceAutologgerSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.mlflow_autologger = mlflow_autologger -class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model): +class ComputeInstanceConnectivityEndpoints(_serialization.Model): """Defines all connectivity endpoints and properties for an ComputeInstance. Variables are only populated by the server, and will be ignored when sending a request. @@ -7426,82 +7314,78 @@ class ComputeInstanceConnectivityEndpoints(msrest.serialization.Model): """ _validation = { - 'public_ip_address': {'readonly': True}, - 'private_ip_address': {'readonly': True}, + "public_ip_address": {"readonly": True}, + "private_ip_address": {"readonly": True}, } _attribute_map = { - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, + "public_ip_address": {"key": "publicIpAddress", "type": "str"}, + "private_ip_address": {"key": "privateIpAddress", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ComputeInstanceConnectivityEndpoints, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.public_ip_address = None self.private_ip_address = None -class ComputeInstanceContainer(msrest.serialization.Model): +class ComputeInstanceContainer(_serialization.Model): """Defines an Aml Instance container. Variables are only populated by the server, and will be ignored when sending a request. :ivar name: Name of the ComputeInstance container. :vartype name: str - :ivar autosave: Auto save settings. Possible values include: "None", "Local", "Remote". + :ivar autosave: Auto save settings. Known values are: "None", "Local", and "Remote". :vartype autosave: str or ~azure.mgmt.machinelearningservices.models.Autosave :ivar gpu: Information of GPU. :vartype gpu: str - :ivar network: network of this container. Possible values include: "Bridge", "Host". + :ivar network: network of this container. Known values are: "Bridge" and "Host". :vartype network: str or ~azure.mgmt.machinelearningservices.models.Network :ivar environment: Environment information of this container. :vartype environment: ~azure.mgmt.machinelearningservices.models.ComputeInstanceEnvironmentInfo :ivar services: services of this containers. 
- :vartype services: list[any] + :vartype services: list[JSON] """ _validation = { - 'services': {'readonly': True}, + "services": {"readonly": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'autosave': {'key': 'autosave', 'type': 'str'}, - 'gpu': {'key': 'gpu', 'type': 'str'}, - 'network': {'key': 'network', 'type': 'str'}, - 'environment': {'key': 'environment', 'type': 'ComputeInstanceEnvironmentInfo'}, - 'services': {'key': 'services', 'type': '[object]'}, + "name": {"key": "name", "type": "str"}, + "autosave": {"key": "autosave", "type": "str"}, + "gpu": {"key": "gpu", "type": "str"}, + "network": {"key": "network", "type": "str"}, + "environment": {"key": "environment", "type": "ComputeInstanceEnvironmentInfo"}, + "services": {"key": "services", "type": "[object]"}, } def __init__( self, *, name: Optional[str] = None, - autosave: Optional[Union[str, "Autosave"]] = None, + autosave: Optional[Union[str, "_models.Autosave"]] = None, gpu: Optional[str] = None, - network: Optional[Union[str, "Network"]] = None, - environment: Optional["ComputeInstanceEnvironmentInfo"] = None, - **kwargs - ): + network: Optional[Union[str, "_models.Network"]] = None, + environment: Optional["_models.ComputeInstanceEnvironmentInfo"] = None, + **kwargs: Any + ) -> None: """ :keyword name: Name of the ComputeInstance container. :paramtype name: str - :keyword autosave: Auto save settings. Possible values include: "None", "Local", "Remote". + :keyword autosave: Auto save settings. Known values are: "None", "Local", and "Remote". :paramtype autosave: str or ~azure.mgmt.machinelearningservices.models.Autosave :keyword gpu: Information of GPU. :paramtype gpu: str - :keyword network: network of this container. Possible values include: "Bridge", "Host". + :keyword network: network of this container. Known values are: "Bridge" and "Host". :paramtype network: str or ~azure.mgmt.machinelearningservices.models.Network :keyword environment: Environment information of this container. :paramtype environment: ~azure.mgmt.machinelearningservices.models.ComputeInstanceEnvironmentInfo """ - super(ComputeInstanceContainer, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.autosave = autosave self.gpu = gpu @@ -7510,7 +7394,7 @@ def __init__( self.services = None -class ComputeInstanceCreatedBy(msrest.serialization.Model): +class ComputeInstanceCreatedBy(_serialization.Model): """Describes information on user who created this ComputeInstance. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -7524,33 +7408,29 @@ class ComputeInstanceCreatedBy(msrest.serialization.Model): """ _validation = { - 'user_name': {'readonly': True}, - 'user_org_id': {'readonly': True}, - 'user_id': {'readonly': True}, + "user_name": {"readonly": True}, + "user_org_id": {"readonly": True}, + "user_id": {"readonly": True}, } _attribute_map = { - 'user_name': {'key': 'userName', 'type': 'str'}, - 'user_org_id': {'key': 'userOrgId', 'type': 'str'}, - 'user_id': {'key': 'userId', 'type': 'str'}, + "user_name": {"key": "userName", "type": "str"}, + "user_org_id": {"key": "userOrgId", "type": "str"}, + "user_id": {"key": "userId", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ComputeInstanceCreatedBy, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.user_name = None self.user_org_id = None self.user_id = None -class ComputeInstanceDataDisk(msrest.serialization.Model): +class ComputeInstanceDataDisk(_serialization.Model): """Defines an Aml Instance DataDisk. - :ivar caching: Caching type of Data Disk. Possible values include: "None", "ReadOnly", + :ivar caching: Caching type of Data Disk. Known values are: "None", "ReadOnly", and "ReadWrite". :vartype caching: str or ~azure.mgmt.machinelearningservices.models.Caching :ivar disk_size_gb: The initial disk size in gigabytes. @@ -7558,30 +7438,30 @@ class ComputeInstanceDataDisk(msrest.serialization.Model): :ivar lun: The lun is used to uniquely identify each data disk. If attaching multiple disks, each should have a distinct lun. :vartype lun: int - :ivar storage_account_type: type of this storage account. Possible values include: - "Standard_LRS", "Premium_LRS". Default value: "Standard_LRS". + :ivar storage_account_type: type of this storage account. Known values are: "Standard_LRS" and + "Premium_LRS". :vartype storage_account_type: str or ~azure.mgmt.machinelearningservices.models.StorageAccountType """ _attribute_map = { - 'caching': {'key': 'caching', 'type': 'str'}, - 'disk_size_gb': {'key': 'diskSizeGB', 'type': 'int'}, - 'lun': {'key': 'lun', 'type': 'int'}, - 'storage_account_type': {'key': 'storageAccountType', 'type': 'str'}, + "caching": {"key": "caching", "type": "str"}, + "disk_size_gb": {"key": "diskSizeGB", "type": "int"}, + "lun": {"key": "lun", "type": "int"}, + "storage_account_type": {"key": "storageAccountType", "type": "str"}, } def __init__( self, *, - caching: Optional[Union[str, "Caching"]] = None, + caching: Optional[Union[str, "_models.Caching"]] = None, disk_size_gb: Optional[int] = None, lun: Optional[int] = None, - storage_account_type: Optional[Union[str, "StorageAccountType"]] = "Standard_LRS", - **kwargs - ): + storage_account_type: Union[str, "_models.StorageAccountType"] = "Standard_LRS", + **kwargs: Any + ) -> None: """ - :keyword caching: Caching type of Data Disk. Possible values include: "None", "ReadOnly", + :keyword caching: Caching type of Data Disk. Known values are: "None", "ReadOnly", and "ReadWrite". :paramtype caching: str or ~azure.mgmt.machinelearningservices.models.Caching :keyword disk_size_gb: The initial disk size in gigabytes. @@ -7589,35 +7469,35 @@ def __init__( :keyword lun: The lun is used to uniquely identify each data disk. If attaching multiple disks, each should have a distinct lun. :paramtype lun: int - :keyword storage_account_type: type of this storage account. Possible values include: - "Standard_LRS", "Premium_LRS". Default value: "Standard_LRS". 
+ :keyword storage_account_type: type of this storage account. Known values are: "Standard_LRS" + and "Premium_LRS". :paramtype storage_account_type: str or ~azure.mgmt.machinelearningservices.models.StorageAccountType """ - super(ComputeInstanceDataDisk, self).__init__(**kwargs) + super().__init__(**kwargs) self.caching = caching self.disk_size_gb = disk_size_gb self.lun = lun self.storage_account_type = storage_account_type -class ComputeInstanceDataMount(msrest.serialization.Model): +class ComputeInstanceDataMount(_serialization.Model): """Defines an Aml Instance DataMount. :ivar source: Source of the ComputeInstance data mount. :vartype source: str - :ivar source_type: Data source type. Possible values include: "Dataset", "Datastore", "URI". + :ivar source_type: Data source type. Known values are: "Dataset", "Datastore", and "URI". :vartype source_type: str or ~azure.mgmt.machinelearningservices.models.SourceType :ivar mount_name: name of the ComputeInstance data mount. :vartype mount_name: str - :ivar mount_action: Mount Action. Possible values include: "Mount", "Unmount". + :ivar mount_action: Mount Action. Known values are: "Mount" and "Unmount". :vartype mount_action: str or ~azure.mgmt.machinelearningservices.models.MountAction :ivar created_by: who this data mount created by. :vartype created_by: str :ivar mount_path: Path of this data mount. :vartype mount_path: str - :ivar mount_state: Mount state. Possible values include: "MountRequested", "Mounted", - "MountFailed", "UnmountRequested", "UnmountFailed", "Unmounted". + :ivar mount_state: Mount state. Known values are: "MountRequested", "Mounted", "MountFailed", + "UnmountRequested", "UnmountFailed", and "Unmounted". :vartype mount_state: str or ~azure.mgmt.machinelearningservices.models.MountState :ivar mounted_on: The time when the disk mounted. 
:vartype mounted_on: ~datetime.datetime @@ -7626,53 +7506,53 @@ class ComputeInstanceDataMount(msrest.serialization.Model): """ _attribute_map = { - 'source': {'key': 'source', 'type': 'str'}, - 'source_type': {'key': 'sourceType', 'type': 'str'}, - 'mount_name': {'key': 'mountName', 'type': 'str'}, - 'mount_action': {'key': 'mountAction', 'type': 'str'}, - 'created_by': {'key': 'createdBy', 'type': 'str'}, - 'mount_path': {'key': 'mountPath', 'type': 'str'}, - 'mount_state': {'key': 'mountState', 'type': 'str'}, - 'mounted_on': {'key': 'mountedOn', 'type': 'iso-8601'}, - 'error': {'key': 'error', 'type': 'str'}, + "source": {"key": "source", "type": "str"}, + "source_type": {"key": "sourceType", "type": "str"}, + "mount_name": {"key": "mountName", "type": "str"}, + "mount_action": {"key": "mountAction", "type": "str"}, + "created_by": {"key": "createdBy", "type": "str"}, + "mount_path": {"key": "mountPath", "type": "str"}, + "mount_state": {"key": "mountState", "type": "str"}, + "mounted_on": {"key": "mountedOn", "type": "iso-8601"}, + "error": {"key": "error", "type": "str"}, } def __init__( self, *, source: Optional[str] = None, - source_type: Optional[Union[str, "SourceType"]] = None, + source_type: Optional[Union[str, "_models.SourceType"]] = None, mount_name: Optional[str] = None, - mount_action: Optional[Union[str, "MountAction"]] = None, + mount_action: Optional[Union[str, "_models.MountAction"]] = None, created_by: Optional[str] = None, mount_path: Optional[str] = None, - mount_state: Optional[Union[str, "MountState"]] = None, + mount_state: Optional[Union[str, "_models.MountState"]] = None, mounted_on: Optional[datetime.datetime] = None, error: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword source: Source of the ComputeInstance data mount. :paramtype source: str - :keyword source_type: Data source type. Possible values include: "Dataset", "Datastore", "URI". + :keyword source_type: Data source type. Known values are: "Dataset", "Datastore", and "URI". :paramtype source_type: str or ~azure.mgmt.machinelearningservices.models.SourceType :keyword mount_name: name of the ComputeInstance data mount. :paramtype mount_name: str - :keyword mount_action: Mount Action. Possible values include: "Mount", "Unmount". + :keyword mount_action: Mount Action. Known values are: "Mount" and "Unmount". :paramtype mount_action: str or ~azure.mgmt.machinelearningservices.models.MountAction :keyword created_by: who this data mount created by. :paramtype created_by: str :keyword mount_path: Path of this data mount. :paramtype mount_path: str - :keyword mount_state: Mount state. Possible values include: "MountRequested", "Mounted", - "MountFailed", "UnmountRequested", "UnmountFailed", "Unmounted". + :keyword mount_state: Mount state. Known values are: "MountRequested", "Mounted", + "MountFailed", "UnmountRequested", "UnmountFailed", and "Unmounted". :paramtype mount_state: str or ~azure.mgmt.machinelearningservices.models.MountState :keyword mounted_on: The time when the disk mounted. :paramtype mounted_on: ~datetime.datetime :keyword error: Error of this data mount. :paramtype error: str """ - super(ComputeInstanceDataMount, self).__init__(**kwargs) + super().__init__(**kwargs) self.source = source self.source_type = source_type self.mount_name = mount_name @@ -7684,7 +7564,7 @@ def __init__( self.error = error -class ComputeInstanceEnvironmentInfo(msrest.serialization.Model): +class ComputeInstanceEnvironmentInfo(_serialization.Model): """Environment information. 
:ivar name: name of environment. @@ -7694,84 +7574,78 @@ class ComputeInstanceEnvironmentInfo(msrest.serialization.Model): """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "version": {"key": "version", "type": "str"}, } - def __init__( - self, - *, - name: Optional[str] = None, - version: Optional[str] = None, - **kwargs - ): + def __init__(self, *, name: Optional[str] = None, version: Optional[str] = None, **kwargs: Any) -> None: """ :keyword name: name of environment. :paramtype name: str :keyword version: version of environment. :paramtype version: str """ - super(ComputeInstanceEnvironmentInfo, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.version = version -class ComputeInstanceLastOperation(msrest.serialization.Model): +class ComputeInstanceLastOperation(_serialization.Model): """The last operation on ComputeInstance. - :ivar operation_name: Name of the last operation. Possible values include: "Create", "Start", - "Stop", "Restart", "Resize", "Reimage", "Delete". + :ivar operation_name: Name of the last operation. Known values are: "Create", "Start", "Stop", + "Restart", "Resize", "Reimage", and "Delete". :vartype operation_name: str or ~azure.mgmt.machinelearningservices.models.OperationName :ivar operation_time: Time of the last operation. :vartype operation_time: ~datetime.datetime - :ivar operation_status: Operation status. Possible values include: "InProgress", "Succeeded", + :ivar operation_status: Operation status. Known values are: "InProgress", "Succeeded", "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ResizeFailed", "ReimageFailed", - "DeleteFailed". + and "DeleteFailed". :vartype operation_status: str or ~azure.mgmt.machinelearningservices.models.OperationStatus - :ivar operation_trigger: Trigger of operation. Possible values include: "User", "Schedule", + :ivar operation_trigger: Trigger of operation. Known values are: "User", "Schedule", and "IdleShutdown". :vartype operation_trigger: str or ~azure.mgmt.machinelearningservices.models.OperationTrigger """ _attribute_map = { - 'operation_name': {'key': 'operationName', 'type': 'str'}, - 'operation_time': {'key': 'operationTime', 'type': 'iso-8601'}, - 'operation_status': {'key': 'operationStatus', 'type': 'str'}, - 'operation_trigger': {'key': 'operationTrigger', 'type': 'str'}, + "operation_name": {"key": "operationName", "type": "str"}, + "operation_time": {"key": "operationTime", "type": "iso-8601"}, + "operation_status": {"key": "operationStatus", "type": "str"}, + "operation_trigger": {"key": "operationTrigger", "type": "str"}, } def __init__( self, *, - operation_name: Optional[Union[str, "OperationName"]] = None, + operation_name: Optional[Union[str, "_models.OperationName"]] = None, operation_time: Optional[datetime.datetime] = None, - operation_status: Optional[Union[str, "OperationStatus"]] = None, - operation_trigger: Optional[Union[str, "OperationTrigger"]] = None, - **kwargs - ): + operation_status: Optional[Union[str, "_models.OperationStatus"]] = None, + operation_trigger: Optional[Union[str, "_models.OperationTrigger"]] = None, + **kwargs: Any + ) -> None: """ - :keyword operation_name: Name of the last operation. Possible values include: "Create", - "Start", "Stop", "Restart", "Resize", "Reimage", "Delete". + :keyword operation_name: Name of the last operation. Known values are: "Create", "Start", + "Stop", "Restart", "Resize", "Reimage", and "Delete". 
:paramtype operation_name: str or ~azure.mgmt.machinelearningservices.models.OperationName :keyword operation_time: Time of the last operation. :paramtype operation_time: ~datetime.datetime - :keyword operation_status: Operation status. Possible values include: "InProgress", - "Succeeded", "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ResizeFailed", - "ReimageFailed", "DeleteFailed". + :keyword operation_status: Operation status. Known values are: "InProgress", "Succeeded", + "CreateFailed", "StartFailed", "StopFailed", "RestartFailed", "ResizeFailed", "ReimageFailed", + and "DeleteFailed". :paramtype operation_status: str or ~azure.mgmt.machinelearningservices.models.OperationStatus - :keyword operation_trigger: Trigger of operation. Possible values include: "User", "Schedule", + :keyword operation_trigger: Trigger of operation. Known values are: "User", "Schedule", and "IdleShutdown". :paramtype operation_trigger: str or ~azure.mgmt.machinelearningservices.models.OperationTrigger """ - super(ComputeInstanceLastOperation, self).__init__(**kwargs) + super().__init__(**kwargs) self.operation_name = operation_name self.operation_time = operation_time self.operation_status = operation_status self.operation_trigger = operation_trigger -class ComputeInstanceProperties(msrest.serialization.Model): +class ComputeInstanceProperties(_serialization.Model): # pylint: disable=too-many-instance-attributes """Compute Instance properties. Variables are only populated by the server, and will be ignored when sending a request. @@ -7783,8 +7657,7 @@ class ComputeInstanceProperties(msrest.serialization.Model): :ivar application_sharing_policy: Policy for sharing applications on this compute instance among users of parent workspace. If Personal, only the creator can access applications on this compute instance. When Shared, any workspace user can access applications on this instance - depending on his/her assigned role. Possible values include: "Personal", "Shared". Default - value: "Shared". + depending on his/her assigned role. Known values are: "Personal" and "Shared". :vartype application_sharing_policy: str or ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy :ivar autologger_settings: Specifies settings for autologger. @@ -7809,13 +7682,13 @@ class ComputeInstanceProperties(msrest.serialization.Model): :vartype created_by: ~azure.mgmt.machinelearningservices.models.ComputeInstanceCreatedBy :ivar errors: Collection of errors encountered on this ComputeInstance. :vartype errors: list[~azure.mgmt.machinelearningservices.models.ErrorResponse] - :ivar state: The current state of this ComputeInstance. Possible values include: "Creating", + :ivar state: The current state of this ComputeInstance. Known values are: "Creating", "CreateFailed", "Deleting", "Running", "Restarting", "Resizing", "JobRunning", "SettingUp", "SetupFailed", "Starting", "Stopped", "Stopping", "UserSettingUp", "UserSetupFailed", - "Unknown", "Unusable". + "Unknown", and "Unusable". :vartype state: str or ~azure.mgmt.machinelearningservices.models.ComputeInstanceState :ivar compute_instance_authorization_type: The Compute Instance Authorization type. Available - values are personal (default). Possible values include: "personal". Default value: "personal". + values are personal (default). "personal" :vartype compute_instance_authorization_type: str or ~azure.mgmt.machinelearningservices.models.ComputeInstanceAuthorizationType :ivar enable_os_patching: Enable Auto OS Patching. 
Possible values are: true, false. @@ -7853,66 +7726,69 @@ class ComputeInstanceProperties(msrest.serialization.Model): """ _validation = { - 'os_image_metadata': {'readonly': True}, - 'connectivity_endpoints': {'readonly': True}, - 'applications': {'readonly': True}, - 'created_by': {'readonly': True}, - 'errors': {'readonly': True}, - 'state': {'readonly': True}, - 'last_operation': {'readonly': True}, - 'containers': {'readonly': True}, - 'data_disks': {'readonly': True}, - 'data_mounts': {'readonly': True}, - 'versions': {'readonly': True}, - } - - _attribute_map = { - 'vm_size': {'key': 'vmSize', 'type': 'str'}, - 'subnet': {'key': 'subnet', 'type': 'ResourceId'}, - 'application_sharing_policy': {'key': 'applicationSharingPolicy', 'type': 'str'}, - 'autologger_settings': {'key': 'autologgerSettings', 'type': 'ComputeInstanceAutologgerSettings'}, - 'ssh_settings': {'key': 'sshSettings', 'type': 'ComputeInstanceSshSettings'}, - 'custom_services': {'key': 'customServices', 'type': '[CustomService]'}, - 'os_image_metadata': {'key': 'osImageMetadata', 'type': 'ImageMetadata'}, - 'connectivity_endpoints': {'key': 'connectivityEndpoints', 'type': 'ComputeInstanceConnectivityEndpoints'}, - 'applications': {'key': 'applications', 'type': '[ComputeInstanceApplication]'}, - 'created_by': {'key': 'createdBy', 'type': 'ComputeInstanceCreatedBy'}, - 'errors': {'key': 'errors', 'type': '[ErrorResponse]'}, - 'state': {'key': 'state', 'type': 'str'}, - 'compute_instance_authorization_type': {'key': 'computeInstanceAuthorizationType', 'type': 'str'}, - 'enable_os_patching': {'key': 'enableOSPatching', 'type': 'bool'}, - 'release_quota_on_stop': {'key': 'releaseQuotaOnStop', 'type': 'bool'}, - 'personal_compute_instance_settings': {'key': 'personalComputeInstanceSettings', 'type': 'PersonalComputeInstanceSettings'}, - 'setup_scripts': {'key': 'setupScripts', 'type': 'SetupScripts'}, - 'last_operation': {'key': 'lastOperation', 'type': 'ComputeInstanceLastOperation'}, - 'schedules': {'key': 'schedules', 'type': 'ComputeSchedules'}, - 'idle_time_before_shutdown': {'key': 'idleTimeBeforeShutdown', 'type': 'str'}, - 'enable_node_public_ip': {'key': 'enableNodePublicIp', 'type': 'bool'}, - 'containers': {'key': 'containers', 'type': '[ComputeInstanceContainer]'}, - 'data_disks': {'key': 'dataDisks', 'type': '[ComputeInstanceDataDisk]'}, - 'data_mounts': {'key': 'dataMounts', 'type': '[ComputeInstanceDataMount]'}, - 'versions': {'key': 'versions', 'type': 'ComputeInstanceVersion'}, - } - - def __init__( + "os_image_metadata": {"readonly": True}, + "connectivity_endpoints": {"readonly": True}, + "applications": {"readonly": True}, + "created_by": {"readonly": True}, + "errors": {"readonly": True}, + "state": {"readonly": True}, + "last_operation": {"readonly": True}, + "containers": {"readonly": True}, + "data_disks": {"readonly": True}, + "data_mounts": {"readonly": True}, + "versions": {"readonly": True}, + } + + _attribute_map = { + "vm_size": {"key": "vmSize", "type": "str"}, + "subnet": {"key": "subnet", "type": "ResourceId"}, + "application_sharing_policy": {"key": "applicationSharingPolicy", "type": "str"}, + "autologger_settings": {"key": "autologgerSettings", "type": "ComputeInstanceAutologgerSettings"}, + "ssh_settings": {"key": "sshSettings", "type": "ComputeInstanceSshSettings"}, + "custom_services": {"key": "customServices", "type": "[CustomService]"}, + "os_image_metadata": {"key": "osImageMetadata", "type": "ImageMetadata"}, + "connectivity_endpoints": {"key": "connectivityEndpoints", "type": 
"ComputeInstanceConnectivityEndpoints"}, + "applications": {"key": "applications", "type": "[ComputeInstanceApplication]"}, + "created_by": {"key": "createdBy", "type": "ComputeInstanceCreatedBy"}, + "errors": {"key": "errors", "type": "[ErrorResponse]"}, + "state": {"key": "state", "type": "str"}, + "compute_instance_authorization_type": {"key": "computeInstanceAuthorizationType", "type": "str"}, + "enable_os_patching": {"key": "enableOSPatching", "type": "bool"}, + "release_quota_on_stop": {"key": "releaseQuotaOnStop", "type": "bool"}, + "personal_compute_instance_settings": { + "key": "personalComputeInstanceSettings", + "type": "PersonalComputeInstanceSettings", + }, + "setup_scripts": {"key": "setupScripts", "type": "SetupScripts"}, + "last_operation": {"key": "lastOperation", "type": "ComputeInstanceLastOperation"}, + "schedules": {"key": "schedules", "type": "ComputeSchedules"}, + "idle_time_before_shutdown": {"key": "idleTimeBeforeShutdown", "type": "str"}, + "enable_node_public_ip": {"key": "enableNodePublicIp", "type": "bool"}, + "containers": {"key": "containers", "type": "[ComputeInstanceContainer]"}, + "data_disks": {"key": "dataDisks", "type": "[ComputeInstanceDataDisk]"}, + "data_mounts": {"key": "dataMounts", "type": "[ComputeInstanceDataMount]"}, + "versions": {"key": "versions", "type": "ComputeInstanceVersion"}, + } + + def __init__( # pylint: disable=too-many-locals self, *, vm_size: Optional[str] = None, - subnet: Optional["ResourceId"] = None, - application_sharing_policy: Optional[Union[str, "ApplicationSharingPolicy"]] = "Shared", - autologger_settings: Optional["ComputeInstanceAutologgerSettings"] = None, - ssh_settings: Optional["ComputeInstanceSshSettings"] = None, - custom_services: Optional[List["CustomService"]] = None, - compute_instance_authorization_type: Optional[Union[str, "ComputeInstanceAuthorizationType"]] = "personal", - enable_os_patching: Optional[bool] = False, - release_quota_on_stop: Optional[bool] = False, - personal_compute_instance_settings: Optional["PersonalComputeInstanceSettings"] = None, - setup_scripts: Optional["SetupScripts"] = None, - schedules: Optional["ComputeSchedules"] = None, + subnet: Optional["_models.ResourceId"] = None, + application_sharing_policy: Union[str, "_models.ApplicationSharingPolicy"] = "Shared", + autologger_settings: Optional["_models.ComputeInstanceAutologgerSettings"] = None, + ssh_settings: Optional["_models.ComputeInstanceSshSettings"] = None, + custom_services: Optional[List["_models.CustomService"]] = None, + compute_instance_authorization_type: Union[str, "_models.ComputeInstanceAuthorizationType"] = "personal", + enable_os_patching: bool = False, + release_quota_on_stop: bool = False, + personal_compute_instance_settings: Optional["_models.PersonalComputeInstanceSettings"] = None, + setup_scripts: Optional["_models.SetupScripts"] = None, + schedules: Optional["_models.ComputeSchedules"] = None, idle_time_before_shutdown: Optional[str] = None, - enable_node_public_ip: Optional[bool] = True, - **kwargs - ): + enable_node_public_ip: bool = True, + **kwargs: Any + ) -> None: """ :keyword vm_size: Virtual Machine Size. :paramtype vm_size: str @@ -7921,8 +7797,7 @@ def __init__( :keyword application_sharing_policy: Policy for sharing applications on this compute instance among users of parent workspace. If Personal, only the creator can access applications on this compute instance. When Shared, any workspace user can access applications on this instance - depending on his/her assigned role. 
Possible values include: "Personal", "Shared". Default - value: "Shared". + depending on his/her assigned role. Known values are: "Personal" and "Shared". :paramtype application_sharing_policy: str or ~azure.mgmt.machinelearningservices.models.ApplicationSharingPolicy :keyword autologger_settings: Specifies settings for autologger. @@ -7933,8 +7808,7 @@ def __init__( :keyword custom_services: List of Custom Services added to the compute. :paramtype custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] :keyword compute_instance_authorization_type: The Compute Instance Authorization type. - Available values are personal (default). Possible values include: "personal". Default value: - "personal". + Available values are personal (default). "personal" :paramtype compute_instance_authorization_type: str or ~azure.mgmt.machinelearningservices.models.ComputeInstanceAuthorizationType :keyword enable_os_patching: Enable Auto OS Patching. Possible values are: true, false. @@ -7959,7 +7833,7 @@ def __init__( public IPs. :paramtype enable_node_public_ip: bool """ - super(ComputeInstanceProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.vm_size = vm_size self.subnet = subnet self.application_sharing_policy = application_sharing_policy @@ -7987,15 +7861,15 @@ def __init__( self.versions = None -class ComputeInstanceSshSettings(msrest.serialization.Model): +class ComputeInstanceSshSettings(_serialization.Model): """Specifies policy and settings for SSH access. Variables are only populated by the server, and will be ignored when sending a request. :ivar ssh_public_access: State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh port is closed on this instance. Enabled - Indicates that the - public ssh port is open and accessible according to the VNet/subnet policy if applicable. - Possible values include: "Enabled", "Disabled". Default value: "Disabled". + public ssh port is open and accessible according to the VNet/subnet policy if applicable. Known + values are: "Enabled" and "Disabled". :vartype ssh_public_access: str or ~azure.mgmt.machinelearningservices.models.SshPublicAccess :ivar admin_user_name: Describes the admin user name. :vartype admin_user_name: str @@ -8007,42 +7881,42 @@ class ComputeInstanceSshSettings(msrest.serialization.Model): """ _validation = { - 'admin_user_name': {'readonly': True}, - 'ssh_port': {'readonly': True}, + "admin_user_name": {"readonly": True}, + "ssh_port": {"readonly": True}, } _attribute_map = { - 'ssh_public_access': {'key': 'sshPublicAccess', 'type': 'str'}, - 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'admin_public_key': {'key': 'adminPublicKey', 'type': 'str'}, + "ssh_public_access": {"key": "sshPublicAccess", "type": "str"}, + "admin_user_name": {"key": "adminUserName", "type": "str"}, + "ssh_port": {"key": "sshPort", "type": "int"}, + "admin_public_key": {"key": "adminPublicKey", "type": "str"}, } def __init__( self, *, - ssh_public_access: Optional[Union[str, "SshPublicAccess"]] = "Disabled", + ssh_public_access: Union[str, "_models.SshPublicAccess"] = "Disabled", admin_public_key: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword ssh_public_access: State of the public SSH port. Possible values are: Disabled - Indicates that the public ssh port is closed on this instance. 
Enabled - Indicates that the - public ssh port is open and accessible according to the VNet/subnet policy if applicable. - Possible values include: "Enabled", "Disabled". Default value: "Disabled". + public ssh port is open and accessible according to the VNet/subnet policy if applicable. Known + values are: "Enabled" and "Disabled". :paramtype ssh_public_access: str or ~azure.mgmt.machinelearningservices.models.SshPublicAccess :keyword admin_public_key: Specifies the SSH rsa public key file as a string. Use "ssh-keygen -t rsa -b 2048" to generate your SSH key pairs. :paramtype admin_public_key: str """ - super(ComputeInstanceSshSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.ssh_public_access = ssh_public_access self.admin_user_name = None self.ssh_port = None self.admin_public_key = admin_public_key -class ComputeInstanceVersion(msrest.serialization.Model): +class ComputeInstanceVersion(_serialization.Model): """Version of computeInstance. :ivar runtime: Runtime of compute instance. @@ -8050,24 +7924,19 @@ class ComputeInstanceVersion(msrest.serialization.Model): """ _attribute_map = { - 'runtime': {'key': 'runtime', 'type': 'str'}, + "runtime": {"key": "runtime", "type": "str"}, } - def __init__( - self, - *, - runtime: Optional[str] = None, - **kwargs - ): + def __init__(self, *, runtime: Optional[str] = None, **kwargs: Any) -> None: """ :keyword runtime: Runtime of compute instance. :paramtype runtime: str """ - super(ComputeInstanceVersion, self).__init__(**kwargs) + super().__init__(**kwargs) self.runtime = runtime -class ComputeResourceSchema(msrest.serialization.Model): +class ComputeResourceSchema(_serialization.Model): """ComputeResourceSchema. :ivar properties: Compute properties. @@ -8075,20 +7944,15 @@ class ComputeResourceSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'Compute'}, + "properties": {"key": "properties", "type": "Compute"}, } - def __init__( - self, - *, - properties: Optional["Compute"] = None, - **kwargs - ): + def __init__(self, *, properties: Optional["_models.Compute"] = None, **kwargs: Any) -> None: """ :keyword properties: Compute properties. :paramtype properties: ~azure.mgmt.machinelearningservices.models.Compute """ - super(ComputeResourceSchema, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties @@ -8114,41 +7978,41 @@ class ComputeResource(Resource, ComputeResourceSchema): :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity :ivar location: Specifies the location of the resource. :vartype location: str - :ivar tags: A set of tags. Contains resource tags defined as key/value pairs. + :ivar tags: Contains resource tags defined as key/value pairs. :vartype tags: dict[str, str] :ivar sku: The sku of the workspace. 
:vartype sku: ~azure.mgmt.machinelearningservices.models.Sku """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'Compute'}, - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "properties": {"key": "properties", "type": "Compute"}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "location": {"key": "location", "type": "str"}, + "tags": {"key": "tags", "type": "{str}"}, + "sku": {"key": "sku", "type": "Sku"}, } def __init__( self, *, - properties: Optional["Compute"] = None, - identity: Optional["ManagedServiceIdentity"] = None, + properties: Optional["_models.Compute"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, - sku: Optional["Sku"] = None, - **kwargs - ): + sku: Optional["_models.Sku"] = None, + **kwargs: Any + ) -> None: """ :keyword properties: Compute properties. :paramtype properties: ~azure.mgmt.machinelearningservices.models.Compute @@ -8156,12 +8020,12 @@ def __init__( :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity :keyword location: Specifies the location of the resource. :paramtype location: str - :keyword tags: A set of tags. Contains resource tags defined as key/value pairs. + :keyword tags: Contains resource tags defined as key/value pairs. :paramtype tags: dict[str, str] :keyword sku: The sku of the workspace. :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ - super(ComputeResource, self).__init__(properties=properties, **kwargs) + super().__init__(properties=properties, **kwargs) self.properties = properties self.identity = identity self.location = location @@ -8173,7 +8037,7 @@ def __init__( self.system_data = None -class ComputeRuntimeDto(msrest.serialization.Model): +class ComputeRuntimeDto(_serialization.Model): """ComputeRuntimeDto. :ivar spark_runtime_version: @@ -8181,24 +8045,19 @@ class ComputeRuntimeDto(msrest.serialization.Model): """ _attribute_map = { - 'spark_runtime_version': {'key': 'sparkRuntimeVersion', 'type': 'str'}, + "spark_runtime_version": {"key": "sparkRuntimeVersion", "type": "str"}, } - def __init__( - self, - *, - spark_runtime_version: Optional[str] = None, - **kwargs - ): + def __init__(self, *, spark_runtime_version: Optional[str] = None, **kwargs: Any) -> None: """ :keyword spark_runtime_version: :paramtype spark_runtime_version: str """ - super(ComputeRuntimeDto, self).__init__(**kwargs) + super().__init__(**kwargs) self.spark_runtime_version = spark_runtime_version -class ComputeSchedules(msrest.serialization.Model): +class ComputeSchedules(_serialization.Model): """The list of schedules to be applied on the computes. 
:ivar compute_start_stop: The list of compute start stop schedules to be applied. @@ -8207,42 +8066,38 @@ class ComputeSchedules(msrest.serialization.Model): """ _attribute_map = { - 'compute_start_stop': {'key': 'computeStartStop', 'type': '[ComputeStartStopSchedule]'}, + "compute_start_stop": {"key": "computeStartStop", "type": "[ComputeStartStopSchedule]"}, } def __init__( - self, - *, - compute_start_stop: Optional[List["ComputeStartStopSchedule"]] = None, - **kwargs - ): + self, *, compute_start_stop: Optional[List["_models.ComputeStartStopSchedule"]] = None, **kwargs: Any + ) -> None: """ :keyword compute_start_stop: The list of compute start stop schedules to be applied. :paramtype compute_start_stop: list[~azure.mgmt.machinelearningservices.models.ComputeStartStopSchedule] """ - super(ComputeSchedules, self).__init__(**kwargs) + super().__init__(**kwargs) self.compute_start_stop = compute_start_stop -class ComputeStartStopSchedule(msrest.serialization.Model): +class ComputeStartStopSchedule(_serialization.Model): """Compute start stop schedule properties. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: A system assigned id for the schedule. :vartype id: str - :ivar provisioning_status: The current deployment state of schedule. Possible values include: - "Completed", "Provisioning", "Failed". + :ivar provisioning_status: The current deployment state of schedule. Known values are: + "Completed", "Provisioning", and "Failed". :vartype provisioning_status: str or ~azure.mgmt.machinelearningservices.models.ProvisioningStatus - :ivar status: Is the schedule enabled or disabled?. Possible values include: "Enabled", - "Disabled". + :ivar status: Is the schedule enabled or disabled?. Known values are: "Enabled" and "Disabled". :vartype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus - :ivar action: [Required] The compute power action. Possible values include: "Start", "Stop". + :ivar action: [Required] The compute power action. Known values are: "Start" and "Stop". :vartype action: str or ~azure.mgmt.machinelearningservices.models.ComputePowerAction - :ivar trigger_type: [Required] The schedule trigger type. Possible values include: - "Recurrence", "Cron". + :ivar trigger_type: [Required] The schedule trigger type. Known values are: "Recurrence" and + "Cron". :vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType :ivar recurrence: Required if triggerType is Recurrence. 
:vartype recurrence: ~azure.mgmt.machinelearningservices.models.Recurrence @@ -8253,40 +8108,40 @@ class ComputeStartStopSchedule(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, - 'provisioning_status': {'readonly': True}, + "id": {"readonly": True}, + "provisioning_status": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'action': {'key': 'action', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'recurrence': {'key': 'recurrence', 'type': 'Recurrence'}, - 'cron': {'key': 'cron', 'type': 'Cron'}, - 'schedule': {'key': 'schedule', 'type': 'ScheduleBase'}, + "id": {"key": "id", "type": "str"}, + "provisioning_status": {"key": "provisioningStatus", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "action": {"key": "action", "type": "str"}, + "trigger_type": {"key": "triggerType", "type": "str"}, + "recurrence": {"key": "recurrence", "type": "Recurrence"}, + "cron": {"key": "cron", "type": "Cron"}, + "schedule": {"key": "schedule", "type": "ScheduleBase"}, } def __init__( self, *, - status: Optional[Union[str, "ScheduleStatus"]] = None, - action: Optional[Union[str, "ComputePowerAction"]] = None, - trigger_type: Optional[Union[str, "TriggerType"]] = None, - recurrence: Optional["Recurrence"] = None, - cron: Optional["Cron"] = None, - schedule: Optional["ScheduleBase"] = None, - **kwargs - ): + status: Optional[Union[str, "_models.ScheduleStatus"]] = None, + action: Optional[Union[str, "_models.ComputePowerAction"]] = None, + trigger_type: Optional[Union[str, "_models.TriggerType"]] = None, + recurrence: Optional["_models.Recurrence"] = None, + cron: Optional["_models.Cron"] = None, + schedule: Optional["_models.ScheduleBase"] = None, + **kwargs: Any + ) -> None: """ - :keyword status: Is the schedule enabled or disabled?. Possible values include: "Enabled", + :keyword status: Is the schedule enabled or disabled?. Known values are: "Enabled" and "Disabled". :paramtype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus - :keyword action: [Required] The compute power action. Possible values include: "Start", "Stop". + :keyword action: [Required] The compute power action. Known values are: "Start" and "Stop". :paramtype action: str or ~azure.mgmt.machinelearningservices.models.ComputePowerAction - :keyword trigger_type: [Required] The schedule trigger type. Possible values include: - "Recurrence", "Cron". + :keyword trigger_type: [Required] The schedule trigger type. Known values are: "Recurrence" and + "Cron". :paramtype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType :keyword recurrence: Required if triggerType is Recurrence. :paramtype recurrence: ~azure.mgmt.machinelearningservices.models.Recurrence @@ -8295,7 +8150,7 @@ def __init__( :keyword schedule: [Deprecated] Not used any more. :paramtype schedule: ~azure.mgmt.machinelearningservices.models.ScheduleBase """ - super(ComputeStartStopSchedule, self).__init__(**kwargs) + super().__init__(**kwargs) self.id = None self.provisioning_status = None self.status = status @@ -8306,7 +8161,7 @@ def __init__( self.schedule = schedule -class ContainerResourceRequirements(msrest.serialization.Model): +class ContainerResourceRequirements(_serialization.Model): """Resource requirements for each container instance within an online deployment. 
:ivar container_resource_limits: Container resource limit info:. @@ -8318,17 +8173,17 @@ class ContainerResourceRequirements(msrest.serialization.Model): """ _attribute_map = { - 'container_resource_limits': {'key': 'containerResourceLimits', 'type': 'ContainerResourceSettings'}, - 'container_resource_requests': {'key': 'containerResourceRequests', 'type': 'ContainerResourceSettings'}, + "container_resource_limits": {"key": "containerResourceLimits", "type": "ContainerResourceSettings"}, + "container_resource_requests": {"key": "containerResourceRequests", "type": "ContainerResourceSettings"}, } def __init__( self, *, - container_resource_limits: Optional["ContainerResourceSettings"] = None, - container_resource_requests: Optional["ContainerResourceSettings"] = None, - **kwargs - ): + container_resource_limits: Optional["_models.ContainerResourceSettings"] = None, + container_resource_requests: Optional["_models.ContainerResourceSettings"] = None, + **kwargs: Any + ) -> None: """ :keyword container_resource_limits: Container resource limit info:. :paramtype container_resource_limits: @@ -8337,12 +8192,12 @@ def __init__( :paramtype container_resource_requests: ~azure.mgmt.machinelearningservices.models.ContainerResourceSettings """ - super(ContainerResourceRequirements, self).__init__(**kwargs) + super().__init__(**kwargs) self.container_resource_limits = container_resource_limits self.container_resource_requests = container_resource_requests -class ContainerResourceSettings(msrest.serialization.Model): +class ContainerResourceSettings(_serialization.Model): """ContainerResourceSettings. :ivar cpu: Number of vCPUs request/limit for container. More info: @@ -8357,19 +8212,14 @@ class ContainerResourceSettings(msrest.serialization.Model): """ _attribute_map = { - 'cpu': {'key': 'cpu', 'type': 'str'}, - 'gpu': {'key': 'gpu', 'type': 'str'}, - 'memory': {'key': 'memory', 'type': 'str'}, + "cpu": {"key": "cpu", "type": "str"}, + "gpu": {"key": "gpu", "type": "str"}, + "memory": {"key": "memory", "type": "str"}, } def __init__( - self, - *, - cpu: Optional[str] = None, - gpu: Optional[str] = None, - memory: Optional[str] = None, - **kwargs - ): + self, *, cpu: Optional[str] = None, gpu: Optional[str] = None, memory: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword cpu: Number of vCPUs request/limit for container. More info: https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. @@ -8381,13 +8231,13 @@ def __init__( https://kubernetes.io/docs/concepts/configuration/manage-compute-resources-container/. :paramtype memory: str """ - super(ContainerResourceSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.cpu = cpu self.gpu = gpu self.memory = memory -class CosmosDbSettings(msrest.serialization.Model): +class CosmosDbSettings(_serialization.Model): """CosmosDbSettings. 
:ivar collections_throughput: @@ -8395,57 +8245,52 @@ class CosmosDbSettings(msrest.serialization.Model): """ _attribute_map = { - 'collections_throughput': {'key': 'collectionsThroughput', 'type': 'int'}, + "collections_throughput": {"key": "collectionsThroughput", "type": "int"}, } - def __init__( - self, - *, - collections_throughput: Optional[int] = None, - **kwargs - ): + def __init__(self, *, collections_throughput: Optional[int] = None, **kwargs: Any) -> None: """ :keyword collections_throughput: :paramtype collections_throughput: int """ - super(CosmosDbSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.collections_throughput = collections_throughput -class ScheduleActionBase(msrest.serialization.Model): +class ScheduleActionBase(_serialization.Model): """ScheduleActionBase. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: JobScheduleAction, CreateMonitorAction, ImportDataAction, EndpointScheduleAction. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + JobScheduleAction, CreateMonitorAction, ImportDataAction, EndpointScheduleAction All required parameters must be populated in order to send to Azure. - :ivar action_type: Required. [Required] Specifies the action type of the schedule.Constant - filled by server. Possible values include: "CreateJob", "InvokeBatchEndpoint", "ImportData", - "CreateMonitor". + :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values + are: "CreateJob", "InvokeBatchEndpoint", "ImportData", and "CreateMonitor". :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType """ _validation = { - 'action_type': {'required': True}, + "action_type": {"required": True}, } _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, + "action_type": {"key": "actionType", "type": "str"}, } _subtype_map = { - 'action_type': {'CreateJob': 'JobScheduleAction', 'CreateMonitor': 'CreateMonitorAction', 'ImportData': 'ImportDataAction', 'InvokeBatchEndpoint': 'EndpointScheduleAction'} + "action_type": { + "CreateJob": "JobScheduleAction", + "CreateMonitor": "CreateMonitorAction", + "ImportData": "ImportDataAction", + "InvokeBatchEndpoint": "EndpointScheduleAction", + } } - def __init__( - self, - **kwargs - ): - """ - """ - super(ScheduleActionBase, self).__init__(**kwargs) - self.action_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.action_type: Optional[str] = None class CreateMonitorAction(ScheduleActionBase): @@ -8453,40 +8298,34 @@ class CreateMonitorAction(ScheduleActionBase): All required parameters must be populated in order to send to Azure. - :ivar action_type: Required. [Required] Specifies the action type of the schedule.Constant - filled by server. Possible values include: "CreateJob", "InvokeBatchEndpoint", "ImportData", - "CreateMonitor". + :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values + are: "CreateJob", "InvokeBatchEndpoint", "ImportData", and "CreateMonitor". :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType - :ivar monitor_definition: Required. [Required] Defines the monitor. + :ivar monitor_definition: [Required] Defines the monitor. Required. 
:vartype monitor_definition: ~azure.mgmt.machinelearningservices.models.MonitorDefinition """ _validation = { - 'action_type': {'required': True}, - 'monitor_definition': {'required': True}, + "action_type": {"required": True}, + "monitor_definition": {"required": True}, } _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, - 'monitor_definition': {'key': 'monitorDefinition', 'type': 'MonitorDefinition'}, + "action_type": {"key": "actionType", "type": "str"}, + "monitor_definition": {"key": "monitorDefinition", "type": "MonitorDefinition"}, } - def __init__( - self, - *, - monitor_definition: "MonitorDefinition", - **kwargs - ): + def __init__(self, *, monitor_definition: "_models.MonitorDefinition", **kwargs: Any) -> None: """ - :keyword monitor_definition: Required. [Required] Defines the monitor. + :keyword monitor_definition: [Required] Defines the monitor. Required. :paramtype monitor_definition: ~azure.mgmt.machinelearningservices.models.MonitorDefinition """ - super(CreateMonitorAction, self).__init__(**kwargs) - self.action_type = 'CreateMonitor' # type: str + super().__init__(**kwargs) + self.action_type: str = "CreateMonitor" self.monitor_definition = monitor_definition -class Cron(msrest.serialization.Model): +class Cron(_serialization.Model): """The workflow trigger cron for ComputeStartStop schedule type. :ivar start_time: The start time in yyyy-MM-ddTHH:mm:ss format. @@ -8501,19 +8340,19 @@ class Cron(msrest.serialization.Model): """ _attribute_map = { - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'expression': {'key': 'expression', 'type': 'str'}, + "start_time": {"key": "startTime", "type": "str"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "expression": {"key": "expression", "type": "str"}, } def __init__( self, *, start_time: Optional[str] = None, - time_zone: Optional[str] = "UTC", + time_zone: str = "UTC", expression: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword start_time: The start time in yyyy-MM-ddTHH:mm:ss format. :paramtype start_time: str @@ -8525,17 +8364,17 @@ def __init__( The expression should follow NCronTab format. :paramtype expression: str """ - super(Cron, self).__init__(**kwargs) + super().__init__(**kwargs) self.start_time = start_time self.time_zone = time_zone self.expression = expression -class TriggerBase(msrest.serialization.Model): +class TriggerBase(_serialization.Model): """TriggerBase. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CronTrigger, RecurrenceTrigger. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CronTrigger, RecurrenceTrigger All required parameters must be populated in order to send to Azure. @@ -8551,34 +8390,26 @@ class TriggerBase(msrest.serialization.Model): TimeZone should follow Windows time zone format. Refer: https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. :vartype time_zone: str - :ivar trigger_type: Required. [Required].Constant filled by server. Possible values include: - "Recurrence", "Cron". + :ivar trigger_type: [Required]. Required. Known values are: "Recurrence" and "Cron". 
:vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType """ _validation = { - 'trigger_type': {'required': True}, + "trigger_type": {"required": True}, } _attribute_map = { - 'end_time': {'key': 'endTime', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, + "end_time": {"key": "endTime", "type": "str"}, + "start_time": {"key": "startTime", "type": "str"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "trigger_type": {"key": "triggerType", "type": "str"}, } - _subtype_map = { - 'trigger_type': {'Cron': 'CronTrigger', 'Recurrence': 'RecurrenceTrigger'} - } + _subtype_map = {"trigger_type": {"Cron": "CronTrigger", "Recurrence": "RecurrenceTrigger"}} def __init__( - self, - *, - end_time: Optional[str] = None, - start_time: Optional[str] = None, - time_zone: Optional[str] = "UTC", - **kwargs - ): + self, *, end_time: Optional[str] = None, start_time: Optional[str] = None, time_zone: str = "UTC", **kwargs: Any + ) -> None: """ :keyword end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer https://en.wikipedia.org/wiki/ISO_8601. @@ -8593,11 +8424,11 @@ def __init__( https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. :paramtype time_zone: str """ - super(TriggerBase, self).__init__(**kwargs) + super().__init__(**kwargs) self.end_time = end_time self.start_time = start_time self.time_zone = time_zone - self.trigger_type = None # type: Optional[str] + self.trigger_type: Optional[str] = None class CronTrigger(TriggerBase): @@ -8617,25 +8448,24 @@ class CronTrigger(TriggerBase): TimeZone should follow Windows time zone format. Refer: https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. :vartype time_zone: str - :ivar trigger_type: Required. [Required].Constant filled by server. Possible values include: - "Recurrence", "Cron". + :ivar trigger_type: [Required]. Required. Known values are: "Recurrence" and "Cron". :vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType - :ivar expression: Required. [Required] Specifies cron expression of schedule. - The expression should follow NCronTab format. + :ivar expression: [Required] Specifies cron expression of schedule. + The expression should follow NCronTab format. Required. 
:vartype expression: str """ _validation = { - 'trigger_type': {'required': True}, - 'expression': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "trigger_type": {"required": True}, + "expression": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'end_time': {'key': 'endTime', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'expression': {'key': 'expression', 'type': 'str'}, + "end_time": {"key": "endTime", "type": "str"}, + "start_time": {"key": "startTime", "type": "str"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "trigger_type": {"key": "triggerType", "type": "str"}, + "expression": {"key": "expression", "type": "str"}, } def __init__( @@ -8644,9 +8474,9 @@ def __init__( expression: str, end_time: Optional[str] = None, start_time: Optional[str] = None, - time_zone: Optional[str] = "UTC", - **kwargs - ): + time_zone: str = "UTC", + **kwargs: Any + ) -> None: """ :keyword end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer https://en.wikipedia.org/wiki/ISO_8601. @@ -8660,12 +8490,12 @@ def __init__( TimeZone should follow Windows time zone format. Refer: https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. :paramtype time_zone: str - :keyword expression: Required. [Required] Specifies cron expression of schedule. - The expression should follow NCronTab format. + :keyword expression: [Required] Specifies cron expression of schedule. + The expression should follow NCronTab format. Required. :paramtype expression: str """ - super(CronTrigger, self).__init__(end_time=end_time, start_time=start_time, time_zone=time_zone, **kwargs) - self.trigger_type = 'Cron' # type: str + super().__init__(end_time=end_time, start_time=start_time, time_zone=time_zone, **kwargs) + self.trigger_type: str = "Cron" self.expression = expression @@ -8679,9 +8509,9 @@ class CsvExportSummary(ExportSummary): :ivar end_date_time: The time when the export was completed. :vartype end_date_time: ~datetime.datetime :ivar exported_row_count: The total number of labeled datapoints exported. - :vartype exported_row_count: long - :ivar format: Required. [Required] The format of exported labels, also as the - discriminator.Constant filled by server. Possible values include: "Dataset", "Coco", "CSV". + :vartype exported_row_count: int + :ivar format: [Required] The format of exported labels, also as the discriminator. Required. + Known values are: "Dataset", "Coco", and "CSV". :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType :ivar labeling_job_id: Name and identifier of the job containing exported labels. 
:vartype labeling_job_id: str @@ -8694,33 +8524,29 @@ class CsvExportSummary(ExportSummary): """ _validation = { - 'end_date_time': {'readonly': True}, - 'exported_row_count': {'readonly': True}, - 'format': {'required': True}, - 'labeling_job_id': {'readonly': True}, - 'start_date_time': {'readonly': True}, - 'container_name': {'readonly': True}, - 'snapshot_path': {'readonly': True}, + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, + "container_name": {"readonly": True}, + "snapshot_path": {"readonly": True}, } _attribute_map = { - 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, - 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, - 'format': {'key': 'format', 'type': 'str'}, - 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, - 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, - 'container_name': {'key': 'containerName', 'type': 'str'}, - 'snapshot_path': {'key': 'snapshotPath', 'type': 'str'}, + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "int"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, + "container_name": {"key": "containerName", "type": "str"}, + "snapshot_path": {"key": "snapshotPath", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(CsvExportSummary, self).__init__(**kwargs) - self.format = 'CSV' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.format: str = "CSV" self.container_name = None self.snapshot_path = None @@ -8730,35 +8556,30 @@ class CustomForecastHorizon(ForecastHorizon): All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] Set forecast horizon value selection mode.Constant filled by - server. Possible values include: "Auto", "Custom". + :ivar mode: [Required] Set forecast horizon value selection mode. Required. Known values are: + "Auto" and "Custom". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.ForecastHorizonMode - :ivar value: Required. [Required] Forecast horizon value. + :ivar value: [Required] Forecast horizon value. Required. :vartype value: int """ _validation = { - 'mode': {'required': True}, - 'value': {'required': True}, + "mode": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'int'}, + "mode": {"key": "mode", "type": "str"}, + "value": {"key": "value", "type": "int"}, } - def __init__( - self, - *, - value: int, - **kwargs - ): + def __init__(self, *, value: int, **kwargs: Any) -> None: """ - :keyword value: Required. [Required] Forecast horizon value. + :keyword value: [Required] Forecast horizon value. Required. :paramtype value: int """ - super(CustomForecastHorizon, self).__init__(**kwargs) - self.mode = 'Custom' # type: str + super().__init__(**kwargs) + self.mode: str = "Custom" self.value = value @@ -8767,8 +8588,8 @@ class CustomInferencingServer(InferencingServer): All required parameters must be populated in order to send to Azure. - :ivar server_type: Required. [Required] Inferencing server type for various targets.Constant - filled by server. 
Possible values include: "AzureMLOnline", "AzureMLBatch", "Triton", "Custom". + :ivar server_type: [Required] Inferencing server type for various targets. Required. Known + values are: "AzureMLOnline", "AzureMLBatch", "Triton", and "Custom". :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType :ivar inference_configuration: Inference configuration for custom inferencing. :vartype inference_configuration: @@ -8776,31 +8597,28 @@ class CustomInferencingServer(InferencingServer): """ _validation = { - 'server_type': {'required': True}, + "server_type": {"required": True}, } _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, - 'inference_configuration': {'key': 'inferenceConfiguration', 'type': 'OnlineInferenceConfiguration'}, + "server_type": {"key": "serverType", "type": "str"}, + "inference_configuration": {"key": "inferenceConfiguration", "type": "OnlineInferenceConfiguration"}, } def __init__( - self, - *, - inference_configuration: Optional["OnlineInferenceConfiguration"] = None, - **kwargs - ): + self, *, inference_configuration: Optional["_models.OnlineInferenceConfiguration"] = None, **kwargs: Any + ) -> None: """ :keyword inference_configuration: Inference configuration for custom inferencing. :paramtype inference_configuration: ~azure.mgmt.machinelearningservices.models.OnlineInferenceConfiguration """ - super(CustomInferencingServer, self).__init__(**kwargs) - self.server_type = 'Custom' # type: str + super().__init__(**kwargs) + self.server_type: str = "Custom" self.inference_configuration = inference_configuration -class CustomKeys(msrest.serialization.Model): +class CustomKeys(_serialization.Model): """Custom Keys credential object. :ivar keys: Dictionary of :code:``. @@ -8808,45 +8626,48 @@ class CustomKeys(msrest.serialization.Model): """ _attribute_map = { - 'keys': {'key': 'keys', 'type': '{str}'}, + "keys": {"key": "keys", "type": "{str}"}, } - def __init__( - self, - *, - keys: Optional[Dict[str, str]] = None, - **kwargs - ): + def __init__(self, *, keys: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: """ :keyword keys: Dictionary of :code:``. :paramtype keys: dict[str, str] """ - super(CustomKeys, self).__init__(**kwargs) + super().__init__(**kwargs) self.keys = keys class CustomKeysWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """Category:= CustomKeys -AuthType:= CustomKeys (as type discriminator) -Credentials:= {CustomKeys} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.CustomKeys -Target:= {any value} -Use Metadata property bag for ApiVersion and other metadata fields. + AuthType:= CustomKeys (as type discriminator) + Credentials:= {CustomKeys} as + Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.CustomKeys + Target:= {any value} + Use Metadata property bag for ApiVersion and other metadata fields. + + Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". 
:vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", + :ivar category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar created_by_workspace_arm_id: The arm id of the workspace which created this connection. + :vartype created_by_workspace_arm_id: str :ivar expiry_time: :vartype expiry_time: ~datetime.datetime + :ivar is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :vartype is_shared_to_all: bool :ivar metadata: Any object. - :vartype metadata: any + :vartype metadata: JSON :ivar target: :vartype target: str :ivar credentials: Custom Keys credential object. @@ -8854,54 +8675,68 @@ class CustomKeysWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """ _validation = { - 'auth_type': {'required': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'CustomKeys'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "CustomKeys"}, } def __init__( self, *, - category: Optional[Union[str, "ConnectionCategory"]] = None, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, expiry_time: Optional[datetime.datetime] = None, - metadata: Optional[Any] = None, + is_shared_to_all: Optional[bool] = None, + metadata: Optional[JSON] = None, target: Optional[str] = None, - credentials: Optional["CustomKeys"] = None, - **kwargs - ): + credentials: Optional["_models.CustomKeys"] = None, + **kwargs: Any + ) -> None: """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", + :keyword category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory :keyword expiry_time: :paramtype expiry_time: ~datetime.datetime + :keyword is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :paramtype is_shared_to_all: bool :keyword metadata: Any object. 
- :paramtype metadata: any + :paramtype metadata: JSON :keyword target: :paramtype target: str :keyword credentials: Custom Keys credential object. :paramtype credentials: ~azure.mgmt.machinelearningservices.models.CustomKeys """ - super(CustomKeysWorkspaceConnectionProperties, self).__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) - self.auth_type = 'CustomKeys' # type: str + super().__init__( + category=category, + expiry_time=expiry_time, + is_shared_to_all=is_shared_to_all, + metadata=metadata, + target=target, + **kwargs + ) + self.auth_type: str = "CustomKeys" self.credentials = credentials -class CustomMetricThreshold(msrest.serialization.Model): +class CustomMetricThreshold(_serialization.Model): """CustomMetricThreshold. All required parameters must be populated in order to send to Azure. - :ivar metric: Required. [Required] The user-defined metric to calculate. + :ivar metric: [Required] The user-defined metric to calculate. Required. :vartype metric: str :ivar threshold: The threshold value. If null, a default value will be set depending on the selected metric. @@ -8909,301 +8744,320 @@ class CustomMetricThreshold(msrest.serialization.Model): """ _validation = { - 'metric': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "metric": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'metric': {'key': 'metric', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "metric": {"key": "metric", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } def __init__( - self, - *, - metric: str, - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + self, *, metric: str, threshold: Optional["_models.MonitoringThreshold"] = None, **kwargs: Any + ) -> None: """ - :keyword metric: Required. [Required] The user-defined metric to calculate. + :keyword metric: [Required] The user-defined metric to calculate. Required. :paramtype metric: str :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ - super(CustomMetricThreshold, self).__init__(**kwargs) + super().__init__(**kwargs) self.metric = metric self.threshold = threshold -class JobInput(msrest.serialization.Model): +class JobInput(_serialization.Model): """Command job definition. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CustomModelJobInput, LiteralJobInput, MLFlowModelJobInput, MLTableJobInput, TritonModelJobInput, UriFileJobInput, UriFolderJobInput. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CustomModelJobInput, LiteralJobInput, MLFlowModelJobInput, MLTableJobInput, + TritonModelJobInput, UriFileJobInput, UriFolderJobInput All required parameters must be populated in order to send to Azure. :ivar description: Description for the input. :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". 
:vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType """ _validation = { - 'job_input_type': {'required': True}, + "job_input_type": {"required": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, } _subtype_map = { - 'job_input_type': {'custom_model': 'CustomModelJobInput', 'literal': 'LiteralJobInput', 'mlflow_model': 'MLFlowModelJobInput', 'mltable': 'MLTableJobInput', 'triton_model': 'TritonModelJobInput', 'uri_file': 'UriFileJobInput', 'uri_folder': 'UriFolderJobInput'} + "job_input_type": { + "custom_model": "CustomModelJobInput", + "literal": "LiteralJobInput", + "mlflow_model": "MLFlowModelJobInput", + "mltable": "MLTableJobInput", + "triton_model": "TritonModelJobInput", + "uri_file": "UriFileJobInput", + "uri_folder": "UriFolderJobInput", + } } - def __init__( - self, - *, - description: Optional[str] = None, - **kwargs - ): + def __init__(self, *, description: Optional[str] = None, **kwargs: Any) -> None: """ :keyword description: Description for the input. :paramtype description: str """ - super(JobInput, self).__init__(**kwargs) + super().__init__(**kwargs) self.description = description - self.job_input_type = None # type: Optional[str] + self.job_input_type: Optional[str] = None -class CustomModelJobInput(JobInput, AssetJobInput): +class CustomModelJobInput(AssetJobInput, JobInput): """CustomModelJobInput. All required parameters must be populated in order to send to Azure. - :ivar mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str :ivar description: Description for the input. :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. 
+ :vartype uri: str """ _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, uri: str, - mode: Optional[Union[str, "InputDeliveryMode"]] = None, description: Optional[str] = None, - **kwargs - ): + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + **kwargs: Any + ) -> None: """ - :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str :keyword description: Description for the input. :paramtype description: str + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str """ - super(CustomModelJobInput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_input_type: str = "custom_model" self.mode = mode self.uri = uri - self.job_input_type = 'custom_model' # type: str - self.description = description -class JobOutput(msrest.serialization.Model): +class JobOutput(_serialization.Model): """Job output definition container information on where to find job output/logs. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CustomModelJobOutput, MLFlowModelJobOutput, MLTableJobOutput, TritonModelJobOutput, UriFileJobOutput, UriFolderJobOutput. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CustomModelJobOutput, MLFlowModelJobOutput, MLTableJobOutput, TritonModelJobOutput, + UriFileJobOutput, UriFolderJobOutput All required parameters must be populated in order to send to Azure. :ivar description: Description for the output. :vartype description: str - :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by - server. Possible values include: "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". 
:vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType """ _validation = { - 'job_output_type': {'required': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, } _subtype_map = { - 'job_output_type': {'custom_model': 'CustomModelJobOutput', 'mlflow_model': 'MLFlowModelJobOutput', 'mltable': 'MLTableJobOutput', 'triton_model': 'TritonModelJobOutput', 'uri_file': 'UriFileJobOutput', 'uri_folder': 'UriFolderJobOutput'} + "job_output_type": { + "custom_model": "CustomModelJobOutput", + "mlflow_model": "MLFlowModelJobOutput", + "mltable": "MLTableJobOutput", + "triton_model": "TritonModelJobOutput", + "uri_file": "UriFileJobOutput", + "uri_folder": "UriFolderJobOutput", + } } - def __init__( - self, - *, - description: Optional[str] = None, - **kwargs - ): + def __init__(self, *, description: Optional[str] = None, **kwargs: Any) -> None: """ :keyword description: Description for the output. :paramtype description: str """ - super(JobOutput, self).__init__(**kwargs) + super().__init__(**kwargs) self.description = description - self.job_output_type = None # type: Optional[str] + self.job_output_type: Optional[str] = None -class CustomModelJobOutput(JobOutput, AssetJobOutput): +class CustomModelJobOutput(AssetJobOutput, JobOutput): """CustomModelJobOutput. All required parameters must be populated in order to send to Azure. + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType :ivar asset_name: Output Asset Name. :vartype asset_name: str :ivar asset_version: Output Asset Version. :vartype asset_version: str :ivar auto_delete_setting: Auto delete setting of output data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and "Direct". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :ivar uri: Output Asset URI. :vartype uri: str - :ivar description: Description for the output. - :vartype description: str - :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by - server. Possible values include: "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". 
- :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType """ _validation = { - 'job_output_type': {'required': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, + description: Optional[str] = None, asset_name: Optional[str] = None, asset_version: Optional[str] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - mode: Optional[Union[str, "OutputDeliveryMode"]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, uri: Optional[str] = None, - description: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ + :keyword description: Description for the output. + :paramtype description: str :keyword asset_name: Output Asset Name. :paramtype asset_name: str :keyword asset_version: Output Asset Version. :paramtype asset_version: str :keyword auto_delete_setting: Auto delete setting of output data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and "Direct". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :keyword uri: Output Asset URI. :paramtype uri: str - :keyword description: Description for the output. - :paramtype description: str """ - super(CustomModelJobOutput, self).__init__(description=description, asset_name=asset_name, asset_version=asset_version, auto_delete_setting=auto_delete_setting, mode=mode, uri=uri, **kwargs) + super().__init__( + asset_name=asset_name, + asset_version=asset_version, + auto_delete_setting=auto_delete_setting, + mode=mode, + uri=uri, + description=description, + **kwargs + ) + self.description = description + self.job_output_type: str = "custom_model" self.asset_name = asset_name self.asset_version = asset_version self.auto_delete_setting = auto_delete_setting self.mode = mode self.uri = uri - self.job_output_type = 'custom_model' # type: str - self.description = description -class MonitoringSignalBase(msrest.serialization.Model): +class MonitoringSignalBase(_serialization.Model): """MonitoringSignalBase. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: CustomMonitoringSignal, DataDriftMonitoringSignal, DataQualityMonitoringSignal, FeatureAttributionDriftMonitoringSignal, GenerationSafetyQualityMonitoringSignal, GenerationTokenStatisticsSignal, ModelPerformanceSignal, PredictionDriftMonitoringSignal. 
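As a rough usage sketch of the CustomModelJobInput/CustomModelJobOutput models regenerated above: the import path is the private restclient package touched by this patch and the URI is a placeholder, so treat both as illustrative rather than a supported public surface.

from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

# The subclasses pin the jobInputType/jobOutputType discriminators themselves;
# callers only supply the payload fields shown in the docstrings above.
job_input = _models.CustomModelJobInput(
    uri="azureml://registries/<registry>/models/<model>/versions/1",  # placeholder URI
    mode="ReadOnlyMount",
    description="Custom model consumed by the job",
)
job_output = _models.CustomModelJobOutput(
    mode="ReadWriteMount",
    description="Where the trained model is written",
)
assert job_input.job_input_type == "custom_model"
assert job_output.job_output_type == "custom_model"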
+ You probably want to use the sub-classes and not this class directly. Known sub-classes are: + CustomMonitoringSignal, DataDriftMonitoringSignal, DataQualityMonitoringSignal, + FeatureAttributionDriftMonitoringSignal, GenerationSafetyQualityMonitoringSignal, + GenerationTokenStatisticsSignal, ModelPerformanceSignal, PredictionDriftMonitoringSignal All required parameters must be populated in order to send to Azure. - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and "Enabled". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :ivar properties: Property dictionary. Properties can be added, but not removed or altered. :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType """ _validation = { - 'signal_type': {'required': True}, + "signal_type": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, } _subtype_map = { - 'signal_type': {'Custom': 'CustomMonitoringSignal', 'DataDrift': 'DataDriftMonitoringSignal', 'DataQuality': 'DataQualityMonitoringSignal', 'FeatureAttributionDrift': 'FeatureAttributionDriftMonitoringSignal', 'GenerationSafetyQuality': 'GenerationSafetyQualityMonitoringSignal', 'GenerationTokenStatistics': 'GenerationTokenStatisticsSignal', 'ModelPerformance': 'ModelPerformanceSignal', 'PredictionDrift': 'PredictionDriftMonitoringSignal'} + "signal_type": { + "Custom": "CustomMonitoringSignal", + "DataDrift": "DataDriftMonitoringSignal", + "DataQuality": "DataQualityMonitoringSignal", + "FeatureAttributionDrift": "FeatureAttributionDriftMonitoringSignal", + "GenerationSafetyQuality": "GenerationSafetyQualityMonitoringSignal", + "GenerationTokenStatistics": "GenerationTokenStatisticsSignal", + "ModelPerformance": "ModelPerformanceSignal", + "PredictionDrift": "PredictionDriftMonitoringSignal", + } } def __init__( self, *, - mode: Optional[Union[str, "MonitoringNotificationMode"]] = None, + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, properties: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :keyword properties: Property dictionary. Properties can be added, but not removed or altered. 
:paramtype properties: dict[str, str] """ - super(MonitoringSignalBase, self).__init__(**kwargs) + super().__init__(**kwargs) self.mode = mode self.properties = properties - self.signal_type = None # type: Optional[str] + self.signal_type: Optional[str] = None class CustomMonitoringSignal(MonitoringSignalBase): @@ -9211,18 +9065,17 @@ class CustomMonitoringSignal(MonitoringSignalBase): All required parameters must be populated in order to send to Azure. - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and "Enabled". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :ivar properties: Property dictionary. Properties can be added, but not removed or altered. :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType - :ivar component_id: Required. [Required] ARM resource ID of the component resource used to - calculate the custom metrics. + :ivar component_id: [Required] ARM resource ID of the component resource used to calculate the + custom metrics. Required. :vartype component_id: str :ivar input_assets: Monitoring assets to take as input. Key is the component input port name, value is the data asset. @@ -9231,54 +9084,54 @@ class CustomMonitoringSignal(MonitoringSignalBase): :ivar inputs: Extra component parameters to take as input. Key is the component literal input port name, value is the parameter value. :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] - :ivar metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. + :ivar metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. :vartype metric_thresholds: list[~azure.mgmt.machinelearningservices.models.CustomMetricThreshold] - :ivar workspace_connection: Required. [Required] A list of metrics to calculate and their - associated thresholds. + :ivar workspace_connection: [Required] A list of metrics to calculate and their associated + thresholds. Required. 
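The _subtype_map above is what routes polymorphic deserialization for monitoring signals. A hedged sketch of that routing, assuming the msrest-style from_dict helper on the vendored _serialization.Model keeps its usual discriminator behavior (an assumption, not something this patch states):

from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

raw = {"signalType": "Custom", "mode": "Enabled", "properties": {"owner": "ml-team"}}
# signalType should select CustomMonitoringSignal via the subtype map; keys
# missing from the payload are simply left as None on the resulting model.
signal = _models.MonitoringSignalBase.from_dict(raw)
print(type(signal).__name__)  # expected: CustomMonitoringSignal
print(signal.mode)            # expected: "Enabled"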
:vartype workspace_connection: ~azure.mgmt.machinelearningservices.models.MonitoringWorkspaceConnection """ _validation = { - 'signal_type': {'required': True}, - 'component_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'metric_thresholds': {'required': True}, - 'workspace_connection': {'required': True}, + "signal_type": {"required": True}, + "component_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "metric_thresholds": {"required": True}, + "workspace_connection": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'input_assets': {'key': 'inputAssets', 'type': '{MonitoringInputDataBase}'}, - 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[CustomMetricThreshold]'}, - 'workspace_connection': {'key': 'workspaceConnection', 'type': 'MonitoringWorkspaceConnection'}, + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "component_id": {"key": "componentId", "type": "str"}, + "input_assets": {"key": "inputAssets", "type": "{MonitoringInputDataBase}"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[CustomMetricThreshold]"}, + "workspace_connection": {"key": "workspaceConnection", "type": "MonitoringWorkspaceConnection"}, } def __init__( self, *, component_id: str, - metric_thresholds: List["CustomMetricThreshold"], - workspace_connection: "MonitoringWorkspaceConnection", - mode: Optional[Union[str, "MonitoringNotificationMode"]] = None, + metric_thresholds: List["_models.CustomMetricThreshold"], + workspace_connection: "_models.MonitoringWorkspaceConnection", + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, properties: Optional[Dict[str, str]] = None, - input_assets: Optional[Dict[str, "MonitoringInputDataBase"]] = None, - inputs: Optional[Dict[str, "JobInput"]] = None, - **kwargs - ): + input_assets: Optional[Dict[str, "_models.MonitoringInputDataBase"]] = None, + inputs: Optional[Dict[str, "_models.JobInput"]] = None, + **kwargs: Any + ) -> None: """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :keyword properties: Property dictionary. Properties can be added, but not removed or altered. :paramtype properties: dict[str, str] - :keyword component_id: Required. [Required] ARM resource ID of the component resource used to - calculate the custom metrics. + :keyword component_id: [Required] ARM resource ID of the component resource used to calculate + the custom metrics. Required. :paramtype component_id: str :keyword input_assets: Monitoring assets to take as input. Key is the component input port name, value is the data asset. @@ -9287,17 +9140,17 @@ def __init__( :keyword inputs: Extra component parameters to take as input. Key is the component literal input port name, value is the parameter value. :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] - :keyword metric_thresholds: Required. 
[Required] A list of metrics to calculate and their - associated thresholds. + :keyword metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. :paramtype metric_thresholds: list[~azure.mgmt.machinelearningservices.models.CustomMetricThreshold] - :keyword workspace_connection: Required. [Required] A list of metrics to calculate and their - associated thresholds. + :keyword workspace_connection: [Required] A list of metrics to calculate and their associated + thresholds. Required. :paramtype workspace_connection: ~azure.mgmt.machinelearningservices.models.MonitoringWorkspaceConnection """ - super(CustomMonitoringSignal, self).__init__(mode=mode, properties=properties, **kwargs) - self.signal_type = 'Custom' # type: str + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "Custom" self.component_id = component_id self.input_assets = input_assets self.inputs = inputs @@ -9310,35 +9163,30 @@ class CustomNCrossValidations(NCrossValidations): All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] Mode for determining N-Cross validations.Constant filled by - server. Possible values include: "Auto", "Custom". + :ivar mode: [Required] Mode for determining N-Cross validations. Required. Known values are: + "Auto" and "Custom". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.NCrossValidationsMode - :ivar value: Required. [Required] N-Cross validations value. + :ivar value: [Required] N-Cross validations value. Required. :vartype value: int """ _validation = { - 'mode': {'required': True}, - 'value': {'required': True}, + "mode": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'int'}, + "mode": {"key": "mode", "type": "str"}, + "value": {"key": "value", "type": "int"}, } - def __init__( - self, - *, - value: int, - **kwargs - ): + def __init__(self, *, value: int, **kwargs: Any) -> None: """ - :keyword value: Required. [Required] N-Cross validations value. + :keyword value: [Required] N-Cross validations value. Required. :paramtype value: int """ - super(CustomNCrossValidations, self).__init__(**kwargs) - self.mode = 'Custom' # type: str + super().__init__(**kwargs) + self.mode: str = "Custom" self.value = value @@ -9347,39 +9195,33 @@ class CustomSeasonality(Seasonality): All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] Seasonality mode.Constant filled by server. Possible values - include: "Auto", "Custom". + :ivar mode: [Required] Seasonality mode. Required. Known values are: "Auto" and "Custom". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.SeasonalityMode - :ivar value: Required. [Required] Seasonality value. + :ivar value: [Required] Seasonality value. Required. :vartype value: int """ _validation = { - 'mode': {'required': True}, - 'value': {'required': True}, + "mode": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'int'}, + "mode": {"key": "mode", "type": "str"}, + "value": {"key": "value", "type": "int"}, } - def __init__( - self, - *, - value: int, - **kwargs - ): + def __init__(self, *, value: int, **kwargs: Any) -> None: """ - :keyword value: Required. [Required] Seasonality value. + :keyword value: [Required] Seasonality value. Required. 
:paramtype value: int """ - super(CustomSeasonality, self).__init__(**kwargs) - self.mode = 'Custom' # type: str + super().__init__(**kwargs) + self.mode: str = "Custom" self.value = value -class CustomService(msrest.serialization.Model): +class CustomService(_serialization.Model): """Specifies the custom service configuration. :ivar additional_properties: Unmatched properties from the message are deserialized to this @@ -9401,13 +9243,13 @@ class CustomService(msrest.serialization.Model): """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'name': {'key': 'name', 'type': 'str'}, - 'image': {'key': 'image', 'type': 'Image'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{EnvironmentVariable}'}, - 'docker': {'key': 'docker', 'type': 'Docker'}, - 'endpoints': {'key': 'endpoints', 'type': '[Endpoint]'}, - 'volumes': {'key': 'volumes', 'type': '[VolumeDefinition]'}, + "additional_properties": {"key": "", "type": "{object}"}, + "name": {"key": "name", "type": "str"}, + "image": {"key": "image", "type": "Image"}, + "environment_variables": {"key": "environmentVariables", "type": "{EnvironmentVariable}"}, + "docker": {"key": "docker", "type": "Docker"}, + "endpoints": {"key": "endpoints", "type": "[Endpoint]"}, + "volumes": {"key": "volumes", "type": "[VolumeDefinition]"}, } def __init__( @@ -9415,13 +9257,13 @@ def __init__( *, additional_properties: Optional[Dict[str, Any]] = None, name: Optional[str] = None, - image: Optional["Image"] = None, - environment_variables: Optional[Dict[str, "EnvironmentVariable"]] = None, - docker: Optional["Docker"] = None, - endpoints: Optional[List["Endpoint"]] = None, - volumes: Optional[List["VolumeDefinition"]] = None, - **kwargs - ): + image: Optional["_models.Image"] = None, + environment_variables: Optional[Dict[str, "_models.EnvironmentVariable"]] = None, + docker: Optional["_models.Docker"] = None, + endpoints: Optional[List["_models.Endpoint"]] = None, + volumes: Optional[List["_models.VolumeDefinition"]] = None, + **kwargs: Any + ) -> None: """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. @@ -9440,7 +9282,7 @@ def __init__( :keyword volumes: Configuring the volumes for the container. :paramtype volumes: list[~azure.mgmt.machinelearningservices.models.VolumeDefinition] """ - super(CustomService, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.name = name self.image = image @@ -9455,35 +9297,30 @@ class CustomTargetLags(TargetLags): All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] Set target lags mode - Auto/Custom.Constant filled by server. - Possible values include: "Auto", "Custom". + :ivar mode: [Required] Set target lags mode - Auto/Custom. Required. Known values are: "Auto" + and "Custom". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetLagsMode - :ivar values: Required. [Required] Set target lags values. + :ivar values: [Required] Set target lags values. Required. 
:vartype values: list[int] """ _validation = { - 'mode': {'required': True}, - 'values': {'required': True}, + "mode": {"required": True}, + "values": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[int]'}, + "mode": {"key": "mode", "type": "str"}, + "values": {"key": "values", "type": "[int]"}, } - def __init__( - self, - *, - values: List[int], - **kwargs - ): + def __init__(self, *, values: List[int], **kwargs: Any) -> None: """ - :keyword values: Required. [Required] Set target lags values. + :keyword values: [Required] Set target lags values. Required. :paramtype values: list[int] """ - super(CustomTargetLags, self).__init__(**kwargs) - self.mode = 'Custom' # type: str + super().__init__(**kwargs) + self.mode: str = "Custom" self.values = values @@ -9492,79 +9329,67 @@ class CustomTargetRollingWindowSize(TargetRollingWindowSize): All required parameters must be populated in order to send to Azure. - :ivar mode: Required. [Required] TargetRollingWindowSiz detection mode.Constant filled by - server. Possible values include: "Auto", "Custom". + :ivar mode: [Required] TargetRollingWindowSiz detection mode. Required. Known values are: + "Auto" and "Custom". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.TargetRollingWindowSizeMode - :ivar value: Required. [Required] TargetRollingWindowSize value. + :ivar value: [Required] TargetRollingWindowSize value. Required. :vartype value: int """ _validation = { - 'mode': {'required': True}, - 'value': {'required': True}, + "mode": {"required": True}, + "value": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'int'}, + "mode": {"key": "mode", "type": "str"}, + "value": {"key": "value", "type": "int"}, } - def __init__( - self, - *, - value: int, - **kwargs - ): + def __init__(self, *, value: int, **kwargs: Any) -> None: """ - :keyword value: Required. [Required] TargetRollingWindowSize value. + :keyword value: [Required] TargetRollingWindowSize value. Required. :paramtype value: int """ - super(CustomTargetRollingWindowSize, self).__init__(**kwargs) - self.mode = 'Custom' # type: str + super().__init__(**kwargs) + self.mode: str = "Custom" self.value = value -class DataImportSource(msrest.serialization.Model): +class DataImportSource(_serialization.Model): """DataImportSource. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DatabaseSource, FileSystemSource. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + DatabaseSource, FileSystemSource All required parameters must be populated in order to send to Azure. :ivar connection: Workspace connection for data import source storage. :vartype connection: str - :ivar source_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "database", "file_system". + :ivar source_type: [Required] Specifies the type of data. Required. Known values are: + "database" and "file_system". 
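A quick sketch of the single-value "Custom" forecasting settings regenerated above (CustomNCrossValidations, CustomSeasonality, CustomTargetLags, CustomTargetRollingWindowSize); the values are arbitrary and the same private-import caveat applies.

from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

n_cross_validations = _models.CustomNCrossValidations(value=5)
seasonality = _models.CustomSeasonality(value=7)
target_lags = _models.CustomTargetLags(values=[1, 2, 3])
rolling_window = _models.CustomTargetRollingWindowSize(value=10)

# Each subclass fixes its own mode discriminator to "Custom" in __init__.
print(n_cross_validations.mode, seasonality.mode, target_lags.mode, rolling_window.mode)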
:vartype source_type: str or ~azure.mgmt.machinelearningservices.models.DataImportSourceType """ _validation = { - 'source_type': {'required': True}, + "source_type": {"required": True}, } _attribute_map = { - 'connection': {'key': 'connection', 'type': 'str'}, - 'source_type': {'key': 'sourceType', 'type': 'str'}, + "connection": {"key": "connection", "type": "str"}, + "source_type": {"key": "sourceType", "type": "str"}, } - _subtype_map = { - 'source_type': {'database': 'DatabaseSource', 'file_system': 'FileSystemSource'} - } + _subtype_map = {"source_type": {"database": "DatabaseSource", "file_system": "FileSystemSource"}} - def __init__( - self, - *, - connection: Optional[str] = None, - **kwargs - ): + def __init__(self, *, connection: Optional[str] = None, **kwargs: Any) -> None: """ :keyword connection: Workspace connection for data import source storage. :paramtype connection: str """ - super(DataImportSource, self).__init__(**kwargs) + super().__init__(**kwargs) self.connection = connection - self.source_type = None # type: Optional[str] + self.source_type: Optional[str] = None class DatabaseSource(DataImportSource): @@ -9574,8 +9399,8 @@ class DatabaseSource(DataImportSource): :ivar connection: Workspace connection for data import source storage. :vartype connection: str - :ivar source_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "database", "file_system". + :ivar source_type: [Required] Specifies the type of data. Required. Known values are: + "database" and "file_system". :vartype source_type: str or ~azure.mgmt.machinelearningservices.models.DataImportSourceType :ivar query: SQL Query statement for data import Database source. :vartype query: str @@ -9588,16 +9413,16 @@ class DatabaseSource(DataImportSource): """ _validation = { - 'source_type': {'required': True}, + "source_type": {"required": True}, } _attribute_map = { - 'connection': {'key': 'connection', 'type': 'str'}, - 'source_type': {'key': 'sourceType', 'type': 'str'}, - 'query': {'key': 'query', 'type': 'str'}, - 'stored_procedure': {'key': 'storedProcedure', 'type': 'str'}, - 'stored_procedure_params': {'key': 'storedProcedureParams', 'type': '[{str}]'}, - 'table_name': {'key': 'tableName', 'type': 'str'}, + "connection": {"key": "connection", "type": "str"}, + "source_type": {"key": "sourceType", "type": "str"}, + "query": {"key": "query", "type": "str"}, + "stored_procedure": {"key": "storedProcedure", "type": "str"}, + "stored_procedure_params": {"key": "storedProcedureParams", "type": "[{str}]"}, + "table_name": {"key": "tableName", "type": "str"}, } def __init__( @@ -9608,8 +9433,8 @@ def __init__( stored_procedure: Optional[str] = None, stored_procedure_params: Optional[List[Dict[str, str]]] = None, table_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword connection: Workspace connection for data import source storage. :paramtype connection: str @@ -9622,15 +9447,15 @@ def __init__( :keyword table_name: Name of the table on data import Database source. 
:paramtype table_name: str """ - super(DatabaseSource, self).__init__(connection=connection, **kwargs) - self.source_type = 'database' # type: str + super().__init__(connection=connection, **kwargs) + self.source_type: str = "database" self.query = query self.stored_procedure = stored_procedure self.stored_procedure_params = stored_procedure_params self.table_name = table_name -class DatabricksSchema(msrest.serialization.Model): +class DatabricksSchema(_serialization.Model): """DatabricksSchema. :ivar properties: Properties of Databricks. @@ -9638,24 +9463,19 @@ class DatabricksSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, + "properties": {"key": "properties", "type": "DatabricksProperties"}, } - def __init__( - self, - *, - properties: Optional["DatabricksProperties"] = None, - **kwargs - ): + def __init__(self, *, properties: Optional["_models.DatabricksProperties"] = None, **kwargs: Any) -> None: """ :keyword properties: Properties of Databricks. :paramtype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties """ - super(DatabricksSchema, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class Databricks(Compute, DatabricksSchema): +class Databricks(Compute, DatabricksSchema): # pylint: disable=too-many-instance-attributes """A DataFactory compute. Variables are only populated by the server, and will be ignored when sending a request. @@ -9664,15 +9484,15 @@ class Databricks(Compute, DatabricksSchema): :ivar properties: Properties of Databricks. :vartype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType :ivar compute_location: Location for the underlying compute. :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ProvisioningState :ivar description: The description of the Machine Learning compute. 
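The DataImportSource hierarchy above uses the same discriminator pattern; a small sketch of building the DatabaseSource variant, with a hypothetical workspace connection name and query:

from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

source = _models.DatabaseSource(
    connection="my_sql_workspace_connection",  # hypothetical connection name
    query="SELECT * FROM telemetry",           # alternatively table_name or a stored procedure
)
print(source.source_type)  # "database", pinned by the subclass constructor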
@@ -9694,38 +9514,38 @@ class Databricks(Compute, DatabricksSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'DatabricksProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "DatabricksProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } def __init__( self, *, - properties: Optional["DatabricksProperties"] = None, + properties: Optional["_models.DatabricksProperties"] = None, compute_location: Optional[str] = None, description: Optional[str] = None, resource_id: Optional[str] = None, disable_local_auth: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword properties: Properties of Databricks. :paramtype properties: ~azure.mgmt.machinelearningservices.models.DatabricksProperties @@ -9739,9 +9559,16 @@ def __init__( MSI and AAD exclusively for authentication. :paramtype disable_local_auth: bool """ - super(Databricks, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) + super().__init__( + compute_location=compute_location, + description=description, + resource_id=resource_id, + disable_local_auth=disable_local_auth, + properties=properties, + **kwargs + ) self.properties = properties - self.compute_type = 'Databricks' # type: str + self.compute_type: str = "Databricks" self.compute_location = compute_location self.provisioning_state = None self.description = description @@ -9753,7 +9580,7 @@ def __init__( self.disable_local_auth = disable_local_auth -class DatabricksComputeSecretsProperties(msrest.serialization.Model): +class DatabricksComputeSecretsProperties(_serialization.Model): """Properties of Databricks Compute Secrets. 
:ivar databricks_access_token: access token for databricks account. @@ -9761,20 +9588,15 @@ class DatabricksComputeSecretsProperties(msrest.serialization.Model): """ _attribute_map = { - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, + "databricks_access_token": {"key": "databricksAccessToken", "type": "str"}, } - def __init__( - self, - *, - databricks_access_token: Optional[str] = None, - **kwargs - ): + def __init__(self, *, databricks_access_token: Optional[str] = None, **kwargs: Any) -> None: """ :keyword databricks_access_token: access token for databricks account. :paramtype databricks_access_token: str """ - super(DatabricksComputeSecretsProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.databricks_access_token = databricks_access_token @@ -9785,37 +9607,32 @@ class DatabricksComputeSecrets(ComputeSecrets, DatabricksComputeSecretsPropertie :ivar databricks_access_token: access token for databricks account. :vartype databricks_access_token: str - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType """ _validation = { - 'compute_type': {'required': True}, + "compute_type": {"required": True}, } _attribute_map = { - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, + "databricks_access_token": {"key": "databricksAccessToken", "type": "str"}, + "compute_type": {"key": "computeType", "type": "str"}, } - def __init__( - self, - *, - databricks_access_token: Optional[str] = None, - **kwargs - ): + def __init__(self, *, databricks_access_token: Optional[str] = None, **kwargs: Any) -> None: """ :keyword databricks_access_token: access token for databricks account. :paramtype databricks_access_token: str """ - super(DatabricksComputeSecrets, self).__init__(databricks_access_token=databricks_access_token, **kwargs) + super().__init__(databricks_access_token=databricks_access_token, **kwargs) self.databricks_access_token = databricks_access_token - self.compute_type = 'Databricks' # type: str + self.compute_type: str = "Databricks" -class DatabricksProperties(msrest.serialization.Model): +class DatabricksProperties(_serialization.Model): """Properties of Databricks. :ivar databricks_access_token: Databricks access token. @@ -9825,38 +9642,34 @@ class DatabricksProperties(msrest.serialization.Model): """ _attribute_map = { - 'databricks_access_token': {'key': 'databricksAccessToken', 'type': 'str'}, - 'workspace_url': {'key': 'workspaceUrl', 'type': 'str'}, + "databricks_access_token": {"key": "databricksAccessToken", "type": "str"}, + "workspace_url": {"key": "workspaceUrl", "type": "str"}, } def __init__( - self, - *, - databricks_access_token: Optional[str] = None, - workspace_url: Optional[str] = None, - **kwargs - ): + self, *, databricks_access_token: Optional[str] = None, workspace_url: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword databricks_access_token: Databricks access token. 
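A sketch of wiring the Databricks schema/compute pair regenerated above; the token, URL, and resource ID are placeholders.

from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

props = _models.DatabricksProperties(
    databricks_access_token="<access-token>",  # placeholder secret
    workspace_url="https://adb-0000000000000000.0.azuredatabricks.net",  # placeholder URL
)
databricks = _models.Databricks(
    properties=props,
    description="Attached Databricks workspace",
    resource_id=(
        "/subscriptions/<sub-id>/resourceGroups/<rg>"
        "/providers/Microsoft.Databricks/workspaces/<workspace-name>"
    ),
)
print(databricks.compute_type)  # "Databricks"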
:paramtype databricks_access_token: str :keyword workspace_url: Workspace Url. :paramtype workspace_url: str """ - super(DatabricksProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.databricks_access_token = databricks_access_token self.workspace_url = workspace_url -class DataCollector(msrest.serialization.Model): +class DataCollector(_serialization.Model): """DataCollector. All required parameters must be populated in order to send to Azure. - :ivar collections: Required. [Required] The collection configuration. Each collection has it - own configuration to collect model data and the name of collection can be arbitrary string. + :ivar collections: [Required] The collection configuration. Each collection has it own + configuration to collect model data and the name of collection can be arbitrary string. Model data collector can be used for either payload logging or custom logging or both of them. Collection request and response are reserved for payload logging, others are for custom - logging. + logging. Required. :vartype collections: dict[str, ~azure.mgmt.machinelearningservices.models.Collection] :ivar request_logging: The request logging configuration for mdc, it includes advanced logging settings for all collections. It's optional. @@ -9866,34 +9679,34 @@ class DataCollector(msrest.serialization.Model): If the rolling rate is hour, all data will be collected in the blob path /yyyy/MM/dd/HH/. If it's day, all data will be collected in blob path /yyyy/MM/dd/. The other benefit of rolling path is that model monitoring ui is able to select a time range - of data very quickly. Possible values include: "Year", "Month", "Day", "Hour", "Minute". + of data very quickly. Known values are: "Year", "Month", "Day", "Hour", and "Minute". :vartype rolling_rate: str or ~azure.mgmt.machinelearningservices.models.RollingRateType """ _validation = { - 'collections': {'required': True}, + "collections": {"required": True}, } _attribute_map = { - 'collections': {'key': 'collections', 'type': '{Collection}'}, - 'request_logging': {'key': 'requestLogging', 'type': 'RequestLogging'}, - 'rolling_rate': {'key': 'rollingRate', 'type': 'str'}, + "collections": {"key": "collections", "type": "{Collection}"}, + "request_logging": {"key": "requestLogging", "type": "RequestLogging"}, + "rolling_rate": {"key": "rollingRate", "type": "str"}, } def __init__( self, *, - collections: Dict[str, "Collection"], - request_logging: Optional["RequestLogging"] = None, - rolling_rate: Optional[Union[str, "RollingRateType"]] = None, - **kwargs - ): + collections: Dict[str, "_models.Collection"], + request_logging: Optional["_models.RequestLogging"] = None, + rolling_rate: Optional[Union[str, "_models.RollingRateType"]] = None, + **kwargs: Any + ) -> None: """ - :keyword collections: Required. [Required] The collection configuration. Each collection has it - own configuration to collect model data and the name of collection can be arbitrary string. + :keyword collections: [Required] The collection configuration. Each collection has it own + configuration to collect model data and the name of collection can be arbitrary string. Model data collector can be used for either payload logging or custom logging or both of them. Collection request and response are reserved for payload logging, others are for custom - logging. + logging. Required. 
:paramtype collections: dict[str, ~azure.mgmt.machinelearningservices.models.Collection] :keyword request_logging: The request logging configuration for mdc, it includes advanced logging settings for all collections. It's optional. @@ -9903,10 +9716,10 @@ def __init__( If the rolling rate is hour, all data will be collected in the blob path /yyyy/MM/dd/HH/. If it's day, all data will be collected in blob path /yyyy/MM/dd/. The other benefit of rolling path is that model monitoring ui is able to select a time range - of data very quickly. Possible values include: "Year", "Month", "Day", "Hour", "Minute". + of data very quickly. Known values are: "Year", "Month", "Day", "Hour", and "Minute". :paramtype rolling_rate: str or ~azure.mgmt.machinelearningservices.models.RollingRateType """ - super(DataCollector, self).__init__(**kwargs) + super().__init__(**kwargs) self.collections = collections self.request_logging = request_logging self.rolling_rate = rolling_rate @@ -9930,37 +9743,32 @@ class DataContainer(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.DataContainerProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'DataContainerProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "DataContainerProperties"}, } - def __init__( - self, - *, - properties: "DataContainerProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.DataContainerProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.DataContainerProperties """ - super(DataContainer, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties @@ -9975,7 +9783,7 @@ class DataContainerProperties(AssetContainer): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar is_archived: Is the asset archived?. :vartype is_archived: bool @@ -9983,55 +9791,55 @@ class DataContainerProperties(AssetContainer): :vartype latest_version: str :ivar next_version: The next auto incremental version. :vartype next_version: str - :ivar data_type: Required. 
[Required] Specifies the type of data. Possible values include: - "uri_file", "uri_folder", "mltable". + :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", + "uri_folder", and "mltable". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'data_type': {'required': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "data_type": {"required": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "data_type": {"key": "dataType", "type": "str"}, } def __init__( self, *, - data_type: Union[str, "DataType"], + data_type: Union[str, "_models.DataType"], description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - is_archived: Optional[bool] = False, - **kwargs - ): + is_archived: bool = False, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword is_archived: Is the asset archived?. :paramtype is_archived: bool - :keyword data_type: Required. [Required] Specifies the type of data. Possible values include: - "uri_file", "uri_folder", "mltable". + :keyword data_type: [Required] Specifies the type of data. Required. Known values are: + "uri_file", "uri_folder", and "mltable". :paramtype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType """ - super(DataContainerProperties, self).__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) self.data_type = data_type -class DataContainerResourceArmPaginatedResult(msrest.serialization.Model): +class DataContainerResourceArmPaginatedResult(_serialization.Model): """A paginated list of DataContainer entities. :ivar next_link: The link to the next page of DataContainer objects. 
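For the data container models above, a short construction sketch; the description, tags, and data type value are arbitrary examples drawn from the docstrings.

from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

props = _models.DataContainerProperties(
    data_type="uri_folder",  # one of the DataType values listed above
    description="Images used for training",
    tags={"team": "vision"},
)
container = _models.DataContainer(properties=props)
print(container.properties.data_type)  # "uri_folder"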
If null, there are no @@ -10042,17 +9850,13 @@ class DataContainerResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[DataContainer]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[DataContainer]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["DataContainer"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.DataContainer"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of DataContainer objects. If null, there are no additional pages. @@ -10060,7 +9864,7 @@ def __init__( :keyword value: An array of objects of type DataContainer. :paramtype value: list[~azure.mgmt.machinelearningservices.models.DataContainer] """ - super(DataContainerResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value @@ -10070,15 +9874,14 @@ class DataDriftMonitoringSignal(MonitoringSignalBase): All required parameters must be populated in order to send to Azure. - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and "Enabled". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :ivar properties: Property dictionary. Properties can be added, but not removed or altered. :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType :ivar data_segment: The data segment used for scoping on a subset of the data population. :vartype data_segment: ~azure.mgmt.machinelearningservices.models.MonitoringDataSegment @@ -10088,51 +9891,51 @@ class DataDriftMonitoringSignal(MonitoringSignalBase): ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType] :ivar features: The feature filter which identifies which feature to calculate drift over. :vartype features: ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterBase - :ivar metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. + :ivar metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. :vartype metric_thresholds: list[~azure.mgmt.machinelearningservices.models.DataDriftMetricThresholdBase] - :ivar production_data: Required. [Required] The data which drift will be calculated for. + :ivar production_data: [Required] The data which drift will be calculated for. Required. :vartype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :ivar reference_data: Required. [Required] The data to calculate drift against. 
+ :ivar reference_data: [Required] The data to calculate drift against. Required. :vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, + "signal_type": {"required": True}, + "metric_thresholds": {"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'data_segment': {'key': 'dataSegment', 'type': 'MonitoringDataSegment'}, - 'feature_data_type_override': {'key': 'featureDataTypeOverride', 'type': '{str}'}, - 'features': {'key': 'features', 'type': 'MonitoringFeatureFilterBase'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[DataDriftMetricThresholdBase]'}, - 'production_data': {'key': 'productionData', 'type': 'MonitoringInputDataBase'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "data_segment": {"key": "dataSegment", "type": "MonitoringDataSegment"}, + "feature_data_type_override": {"key": "featureDataTypeOverride", "type": "{str}"}, + "features": {"key": "features", "type": "MonitoringFeatureFilterBase"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[DataDriftMetricThresholdBase]"}, + "production_data": {"key": "productionData", "type": "MonitoringInputDataBase"}, + "reference_data": {"key": "referenceData", "type": "MonitoringInputDataBase"}, } def __init__( self, *, - metric_thresholds: List["DataDriftMetricThresholdBase"], - production_data: "MonitoringInputDataBase", - reference_data: "MonitoringInputDataBase", - mode: Optional[Union[str, "MonitoringNotificationMode"]] = None, + metric_thresholds: List["_models.DataDriftMetricThresholdBase"], + production_data: "_models.MonitoringInputDataBase", + reference_data: "_models.MonitoringInputDataBase", + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, properties: Optional[Dict[str, str]] = None, - data_segment: Optional["MonitoringDataSegment"] = None, - feature_data_type_override: Optional[Dict[str, Union[str, "MonitoringFeatureDataType"]]] = None, - features: Optional["MonitoringFeatureFilterBase"] = None, - **kwargs - ): - """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". + data_segment: Optional["_models.MonitoringDataSegment"] = None, + feature_data_type_override: Optional[Dict[str, Union[str, "_models.MonitoringFeatureDataType"]]] = None, + features: Optional["_models.MonitoringFeatureFilterBase"] = None, + **kwargs: Any + ) -> None: + """ + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :keyword properties: Property dictionary. Properties can be added, but not removed or altered. :paramtype properties: dict[str, str] @@ -10144,17 +9947,17 @@ def __init__( ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType] :keyword features: The feature filter which identifies which feature to calculate drift over. 
:paramtype features: ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterBase - :keyword metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. + :keyword metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. :paramtype metric_thresholds: list[~azure.mgmt.machinelearningservices.models.DataDriftMetricThresholdBase] - :keyword production_data: Required. [Required] The data which drift will be calculated for. + :keyword production_data: [Required] The data which drift will be calculated for. Required. :paramtype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :keyword reference_data: Required. [Required] The data to calculate drift against. + :keyword reference_data: [Required] The data to calculate drift against. Required. :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ - super(DataDriftMonitoringSignal, self).__init__(mode=mode, properties=properties, **kwargs) - self.signal_type = 'DataDrift' # type: str + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "DataDrift" self.data_segment = data_segment self.feature_data_type_override = feature_data_type_override self.features = features @@ -10170,15 +9973,15 @@ class DataFactory(Compute): All required parameters must be populated in order to send to Azure. - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType :ivar compute_location: Location for the underlying compute. :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ProvisioningState :ivar description: The description of the Machine Learning compute. 
@@ -10200,25 +10003,25 @@ class DataFactory(Compute): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } def __init__( @@ -10228,8 +10031,8 @@ def __init__( description: Optional[str] = None, resource_id: Optional[str] = None, disable_local_auth: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword compute_location: Location for the underlying compute. :paramtype compute_location: str @@ -10241,15 +10044,21 @@ def __init__( MSI and AAD exclusively for authentication. :paramtype disable_local_auth: bool """ - super(DataFactory, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs) - self.compute_type = 'DataFactory' # type: str + super().__init__( + compute_location=compute_location, + description=description, + resource_id=resource_id, + disable_local_auth=disable_local_auth, + **kwargs + ) + self.compute_type: str = "DataFactory" class DataVersionBaseProperties(AssetBase): """Data version base definition. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MLTableData, UriFileDataVersion, UriFolderDataVersion. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + MLTableData, UriFileDataVersion, UriFolderDataVersion All required parameters must be populated in order to send to Azure. @@ -10257,7 +10066,7 @@ class DataVersionBaseProperties(AssetBase): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. 
:vartype tags: dict[str, str] :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -10267,11 +10076,11 @@ class DataVersionBaseProperties(AssetBase): :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is provided it will be used to populate IsArchived. :vartype is_archived: bool - :ivar data_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "uri_file", "uri_folder", "mltable". + :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", + "uri_folder", and "mltable". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType - :ivar data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. + :ivar data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. :vartype data_uri: str :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual Property. @@ -10281,25 +10090,25 @@ class DataVersionBaseProperties(AssetBase): """ _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "data_type": {"required": True}, + "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, } _subtype_map = { - 'data_type': {'mltable': 'MLTableData', 'uri_file': 'UriFileDataVersion', 'uri_folder': 'UriFolderDataVersion'} + "data_type": {"mltable": "MLTableData", "uri_file": "UriFileDataVersion", "uri_folder": "UriFolderDataVersion"} } def __init__( @@ -10309,19 +10118,19 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - is_anonymous: Optional[bool] = False, - is_archived: Optional[bool] = False, - intellectual_property: Optional["IntellectualProperty"] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + intellectual_property: Optional["_models.IntellectualProperty"] = None, stage: Optional[str] = None, - **kwargs - 
): + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -10331,8 +10140,8 @@ def __init__( :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is provided it will be used to populate IsArchived. :paramtype is_archived: bool - :keyword data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. + :keyword data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. :paramtype data_uri: str :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual Property. @@ -10341,14 +10150,22 @@ def __init__( :keyword stage: Stage in the data lifecycle assigned to this data asset. :paramtype stage: str """ - super(DataVersionBaseProperties, self).__init__(description=description, properties=properties, tags=tags, auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs) - self.data_type = 'DataVersionBaseProperties' # type: str + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) + self.data_type: Optional[str] = None self.data_uri = data_uri self.intellectual_property = intellectual_property self.stage = stage -class DataImport(DataVersionBaseProperties): +class DataImport(DataVersionBaseProperties): # pylint: disable=too-many-instance-attributes """DataImport. All required parameters must be populated in order to send to Azure. @@ -10357,7 +10174,7 @@ class DataImport(DataVersionBaseProperties): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -10367,11 +10184,11 @@ class DataImport(DataVersionBaseProperties): :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is provided it will be used to populate IsArchived. :vartype is_archived: bool - :ivar data_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "uri_file", "uri_folder", "mltable". + :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", + "uri_folder", and "mltable". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType - :ivar data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. + :ivar data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. 
:vartype data_uri: str :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual Property. @@ -10385,23 +10202,23 @@ class DataImport(DataVersionBaseProperties): """ _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "data_type": {"required": True}, + "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 'stage', 'type': 'str'}, - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'source': {'key': 'source', 'type': 'DataImportSource'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "source": {"key": "source", "type": "DataImportSource"}, } def __init__( @@ -10411,21 +10228,21 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - is_anonymous: Optional[bool] = False, - is_archived: Optional[bool] = False, - intellectual_property: Optional["IntellectualProperty"] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + intellectual_property: Optional["_models.IntellectualProperty"] = None, stage: Optional[str] = None, asset_name: Optional[str] = None, - source: Optional["DataImportSource"] = None, - **kwargs - ): + source: Optional["_models.DataImportSource"] = None, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -10435,8 +10252,8 @@ def __init__( :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is provided it will be used to populate IsArchived. :paramtype is_archived: bool - :keyword data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. 
+ :keyword data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. :paramtype data_uri: str :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual Property. @@ -10449,13 +10266,24 @@ def __init__( :keyword source: Source data of the asset to import from. :paramtype source: ~azure.mgmt.machinelearningservices.models.DataImportSource """ - super(DataImport, self).__init__(description=description, properties=properties, tags=tags, auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, data_uri=data_uri, intellectual_property=intellectual_property, stage=stage, **kwargs) - self.data_type = 'uri_folder' # type: str + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + data_uri=data_uri, + intellectual_property=intellectual_property, + stage=stage, + **kwargs + ) + self.data_type: str = "uri_folder" self.asset_name = asset_name self.source = source -class DataLakeAnalyticsSchema(msrest.serialization.Model): +class DataLakeAnalyticsSchema(_serialization.Model): """DataLakeAnalyticsSchema. :ivar properties: @@ -10464,25 +10292,22 @@ class DataLakeAnalyticsSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsSchemaProperties'}, + "properties": {"key": "properties", "type": "DataLakeAnalyticsSchemaProperties"}, } def __init__( - self, - *, - properties: Optional["DataLakeAnalyticsSchemaProperties"] = None, - **kwargs - ): + self, *, properties: Optional["_models.DataLakeAnalyticsSchemaProperties"] = None, **kwargs: Any + ) -> None: """ :keyword properties: :paramtype properties: ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties """ - super(DataLakeAnalyticsSchema, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class DataLakeAnalytics(Compute, DataLakeAnalyticsSchema): +class DataLakeAnalytics(Compute, DataLakeAnalyticsSchema): # pylint: disable=too-many-instance-attributes """A DataLakeAnalytics compute. Variables are only populated by the server, and will be ignored when sending a request. @@ -10492,15 +10317,15 @@ class DataLakeAnalytics(Compute, DataLakeAnalyticsSchema): :ivar properties: :vartype properties: ~azure.mgmt.machinelearningservices.models.DataLakeAnalyticsSchemaProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType :ivar compute_location: Location for the underlying compute. :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. 
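# A minimal usage sketch for the regenerated DataImport model above, assuming the
# models package is importable from the REST-client path used in this patch.
# The URI is the placeholder from the docstring; asset_name and stage are
# illustrative values, not defaults of the SDK.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

data_import = _models.DataImport(
    data_uri="https://go.microsoft.com/fwlink/?linkid=2202330",
    asset_name="my-imported-asset",  # hypothetical asset name
    stage="raw",                     # hypothetical lifecycle stage
)
assert data_import.data_type == "uri_folder"  # discriminator set by the constructor, not the caller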
Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ProvisioningState :ivar description: The description of the Machine Learning compute. @@ -10522,38 +10347,38 @@ class DataLakeAnalytics(Compute, DataLakeAnalyticsSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'DataLakeAnalyticsSchemaProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "DataLakeAnalyticsSchemaProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } def __init__( self, *, - properties: Optional["DataLakeAnalyticsSchemaProperties"] = None, + properties: Optional["_models.DataLakeAnalyticsSchemaProperties"] = None, compute_location: Optional[str] = None, description: Optional[str] = None, resource_id: Optional[str] = None, disable_local_auth: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword properties: :paramtype properties: @@ -10568,9 +10393,16 @@ def __init__( MSI and AAD exclusively for authentication. 
:paramtype disable_local_auth: bool """ - super(DataLakeAnalytics, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) + super().__init__( + compute_location=compute_location, + description=description, + resource_id=resource_id, + disable_local_auth=disable_local_auth, + properties=properties, + **kwargs + ) self.properties = properties - self.compute_type = 'DataLakeAnalytics' # type: str + self.compute_type: str = "DataLakeAnalytics" self.compute_location = compute_location self.provisioning_state = None self.description = description @@ -10582,7 +10414,7 @@ def __init__( self.disable_local_auth = disable_local_auth -class DataLakeAnalyticsSchemaProperties(msrest.serialization.Model): +class DataLakeAnalyticsSchemaProperties(_serialization.Model): """DataLakeAnalyticsSchemaProperties. :ivar data_lake_store_account_name: DataLake Store Account Name. @@ -10590,20 +10422,15 @@ class DataLakeAnalyticsSchemaProperties(msrest.serialization.Model): """ _attribute_map = { - 'data_lake_store_account_name': {'key': 'dataLakeStoreAccountName', 'type': 'str'}, + "data_lake_store_account_name": {"key": "dataLakeStoreAccountName", "type": "str"}, } - def __init__( - self, - *, - data_lake_store_account_name: Optional[str] = None, - **kwargs - ): + def __init__(self, *, data_lake_store_account_name: Optional[str] = None, **kwargs: Any) -> None: """ :keyword data_lake_store_account_name: DataLake Store Account Name. :paramtype data_lake_store_account_name: str """ - super(DataLakeAnalyticsSchemaProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.data_lake_store_account_name = data_lake_store_account_name @@ -10612,8 +10439,8 @@ class DataPathAssetReference(AssetReferenceBase): All required parameters must be populated in order to send to Azure. - :ivar reference_type: Required. [Required] Specifies the type of asset reference.Constant - filled by server. Possible values include: "Id", "DataPath", "OutputPath". + :ivar reference_type: [Required] Specifies the type of asset reference. Required. Known values + are: "Id", "DataPath", and "OutputPath". :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType :ivar datastore_id: ARM resource ID of the datastore where the asset is located. :vartype datastore_id: str @@ -10622,30 +10449,24 @@ class DataPathAssetReference(AssetReferenceBase): """ _validation = { - 'reference_type': {'required': True}, + "reference_type": {"required": True}, } _attribute_map = { - 'reference_type': {'key': 'referenceType', 'type': 'str'}, - 'datastore_id': {'key': 'datastoreId', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'str'}, + "reference_type": {"key": "referenceType", "type": "str"}, + "datastore_id": {"key": "datastoreId", "type": "str"}, + "path": {"key": "path", "type": "str"}, } - def __init__( - self, - *, - datastore_id: Optional[str] = None, - path: Optional[str] = None, - **kwargs - ): + def __init__(self, *, datastore_id: Optional[str] = None, path: Optional[str] = None, **kwargs: Any) -> None: """ :keyword datastore_id: ARM resource ID of the datastore where the asset is located. :paramtype datastore_id: str :keyword path: The path of the file/directory in the datastore. 
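# A sketch of the regenerated DataLakeAnalytics compute model above, assuming the
# same import path; the account name is an illustrative placeholder.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

dla = _models.DataLakeAnalytics(
    properties=_models.DataLakeAnalyticsSchemaProperties(
        data_lake_store_account_name="mydatalakestore"  # hypothetical store name
    ),
    description="Attached Data Lake Analytics compute",
    disable_local_auth=True,
)
assert dla.compute_type == "DataLakeAnalytics"  # filled in by the constructor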
:paramtype path: str """ - super(DataPathAssetReference, self).__init__(**kwargs) - self.reference_type = 'DataPath' # type: str + super().__init__(**kwargs) + self.reference_type: str = "DataPath" self.datastore_id = datastore_id self.path = path @@ -10655,15 +10476,14 @@ class DataQualityMonitoringSignal(MonitoringSignalBase): All required parameters must be populated in order to send to Azure. - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and "Enabled". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :ivar properties: Property dictionary. Properties can be added, but not removed or altered. :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType :ivar feature_data_type_override: A dictionary that maps feature names to their respective data types. @@ -10671,50 +10491,50 @@ class DataQualityMonitoringSignal(MonitoringSignalBase): ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType] :ivar features: The features to calculate drift over. :vartype features: ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterBase - :ivar metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. + :ivar metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. :vartype metric_thresholds: list[~azure.mgmt.machinelearningservices.models.DataQualityMetricThresholdBase] - :ivar production_data: Required. [Required] The data produced by the production service which - drift will be calculated for. + :ivar production_data: [Required] The data produced by the production service which drift will + be calculated for. Required. :vartype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :ivar reference_data: Required. [Required] The data to calculate drift against. + :ivar reference_data: [Required] The data to calculate drift against. Required. 
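# Sketch for the regenerated DataPathAssetReference model above; the datastore
# ARM ID and path are illustrative placeholders, and the discriminator is again
# set by the constructor rather than passed in.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

ref = _models.DataPathAssetReference(
    datastore_id="/subscriptions/.../datastores/workspaceblobstore",  # hypothetical ARM ID
    path="model/weights.pt",                                          # hypothetical datastore path
)
assert ref.reference_type == "DataPath"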
:vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, + "signal_type": {"required": True}, + "metric_thresholds": {"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'feature_data_type_override': {'key': 'featureDataTypeOverride', 'type': '{str}'}, - 'features': {'key': 'features', 'type': 'MonitoringFeatureFilterBase'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[DataQualityMetricThresholdBase]'}, - 'production_data': {'key': 'productionData', 'type': 'MonitoringInputDataBase'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "feature_data_type_override": {"key": "featureDataTypeOverride", "type": "{str}"}, + "features": {"key": "features", "type": "MonitoringFeatureFilterBase"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[DataQualityMetricThresholdBase]"}, + "production_data": {"key": "productionData", "type": "MonitoringInputDataBase"}, + "reference_data": {"key": "referenceData", "type": "MonitoringInputDataBase"}, } def __init__( self, *, - metric_thresholds: List["DataQualityMetricThresholdBase"], - production_data: "MonitoringInputDataBase", - reference_data: "MonitoringInputDataBase", - mode: Optional[Union[str, "MonitoringNotificationMode"]] = None, + metric_thresholds: List["_models.DataQualityMetricThresholdBase"], + production_data: "_models.MonitoringInputDataBase", + reference_data: "_models.MonitoringInputDataBase", + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, properties: Optional[Dict[str, str]] = None, - feature_data_type_override: Optional[Dict[str, Union[str, "MonitoringFeatureDataType"]]] = None, - features: Optional["MonitoringFeatureFilterBase"] = None, - **kwargs - ): + feature_data_type_override: Optional[Dict[str, Union[str, "_models.MonitoringFeatureDataType"]]] = None, + features: Optional["_models.MonitoringFeatureFilterBase"] = None, + **kwargs: Any + ) -> None: """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :keyword properties: Property dictionary. Properties can be added, but not removed or altered. :paramtype properties: dict[str, str] @@ -10724,18 +10544,18 @@ def __init__( ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType] :keyword features: The features to calculate drift over. :paramtype features: ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterBase - :keyword metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. + :keyword metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. 
:paramtype metric_thresholds: list[~azure.mgmt.machinelearningservices.models.DataQualityMetricThresholdBase] - :keyword production_data: Required. [Required] The data produced by the production service - which drift will be calculated for. + :keyword production_data: [Required] The data produced by the production service which drift + will be calculated for. Required. :paramtype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :keyword reference_data: Required. [Required] The data to calculate drift against. + :keyword reference_data: [Required] The data to calculate drift against. Required. :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ - super(DataQualityMonitoringSignal, self).__init__(mode=mode, properties=properties, **kwargs) - self.signal_type = 'DataQuality' # type: str + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "DataQuality" self.feature_data_type_override = feature_data_type_override self.features = features self.metric_thresholds = metric_thresholds @@ -10753,9 +10573,9 @@ class DatasetExportSummary(ExportSummary): :ivar end_date_time: The time when the export was completed. :vartype end_date_time: ~datetime.datetime :ivar exported_row_count: The total number of labeled datapoints exported. - :vartype exported_row_count: long - :ivar format: Required. [Required] The format of exported labels, also as the - discriminator.Constant filled by server. Possible values include: "Dataset", "Coco", "CSV". + :vartype exported_row_count: int + :ivar format: [Required] The format of exported labels, also as the discriminator. Required. + Known values are: "Dataset", "Coco", and "CSV". :vartype format: str or ~azure.mgmt.machinelearningservices.models.ExportFormatType :ivar labeling_job_id: Name and identifier of the job containing exported labels. 
:vartype labeling_job_id: str @@ -10766,31 +10586,27 @@ class DatasetExportSummary(ExportSummary): """ _validation = { - 'end_date_time': {'readonly': True}, - 'exported_row_count': {'readonly': True}, - 'format': {'required': True}, - 'labeling_job_id': {'readonly': True}, - 'start_date_time': {'readonly': True}, - 'labeled_asset_name': {'readonly': True}, + "end_date_time": {"readonly": True}, + "exported_row_count": {"readonly": True}, + "format": {"required": True}, + "labeling_job_id": {"readonly": True}, + "start_date_time": {"readonly": True}, + "labeled_asset_name": {"readonly": True}, } _attribute_map = { - 'end_date_time': {'key': 'endDateTime', 'type': 'iso-8601'}, - 'exported_row_count': {'key': 'exportedRowCount', 'type': 'long'}, - 'format': {'key': 'format', 'type': 'str'}, - 'labeling_job_id': {'key': 'labelingJobId', 'type': 'str'}, - 'start_date_time': {'key': 'startDateTime', 'type': 'iso-8601'}, - 'labeled_asset_name': {'key': 'labeledAssetName', 'type': 'str'}, + "end_date_time": {"key": "endDateTime", "type": "iso-8601"}, + "exported_row_count": {"key": "exportedRowCount", "type": "int"}, + "format": {"key": "format", "type": "str"}, + "labeling_job_id": {"key": "labelingJobId", "type": "str"}, + "start_date_time": {"key": "startDateTime", "type": "iso-8601"}, + "labeled_asset_name": {"key": "labeledAssetName", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(DatasetExportSummary, self).__init__(**kwargs) - self.format = 'Dataset' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.format: str = "Dataset" self.labeled_asset_name = None @@ -10812,41 +10628,36 @@ class Datastore(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.DatastoreProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'DatastoreProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "DatastoreProperties"}, } - def __init__( - self, - *, - properties: "DatastoreProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.DatastoreProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.DatastoreProperties """ - super(Datastore, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class DatastoreResourceArmPaginatedResult(msrest.serialization.Model): +class DatastoreResourceArmPaginatedResult(_serialization.Model): """A paginated list of Datastore entities. :ivar next_link: The link to the next page of Datastore objects. If null, there are no @@ -10857,17 +10668,13 @@ class DatastoreResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Datastore]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Datastore]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["Datastore"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.Datastore"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of Datastore objects. If null, there are no additional pages. @@ -10875,7 +10682,7 @@ def __init__( :keyword value: An array of objects of type Datastore. :paramtype value: list[~azure.mgmt.machinelearningservices.models.Datastore] """ - super(DatastoreResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value @@ -10898,41 +10705,36 @@ class DataVersionBase(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.DataVersionBaseProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'DataVersionBaseProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "DataVersionBaseProperties"}, } - def __init__( - self, - *, - properties: "DataVersionBaseProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.DataVersionBaseProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.DataVersionBaseProperties """ - super(DataVersionBase, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class DataVersionBaseResourceArmPaginatedResult(msrest.serialization.Model): +class DataVersionBaseResourceArmPaginatedResult(_serialization.Model): """A paginated list of DataVersionBase entities. :ivar next_link: The link to the next page of DataVersionBase objects. If null, there are no @@ -10943,17 +10745,13 @@ class DataVersionBaseResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[DataVersionBase]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[DataVersionBase]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["DataVersionBase"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.DataVersionBase"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of DataVersionBase objects. If null, there are no additional pages. @@ -10961,44 +10759,40 @@ def __init__( :keyword value: An array of objects of type DataVersionBase. :paramtype value: list[~azure.mgmt.machinelearningservices.models.DataVersionBase] """ - super(DataVersionBaseResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class OnlineScaleSettings(msrest.serialization.Model): +class OnlineScaleSettings(_serialization.Model): """Online deployment scaling configuration. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: DefaultScaleSettings, TargetUtilizationScaleSettings. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + DefaultScaleSettings, TargetUtilizationScaleSettings All required parameters must be populated in order to send to Azure. - :ivar scale_type: Required. [Required] Type of deployment scaling algorithm.Constant filled by - server. Possible values include: "Default", "TargetUtilization". + :ivar scale_type: [Required] Type of deployment scaling algorithm. Required. Known values are: + "Default" and "TargetUtilization". :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.ScaleType """ _validation = { - 'scale_type': {'required': True}, + "scale_type": {"required": True}, } _attribute_map = { - 'scale_type': {'key': 'scaleType', 'type': 'str'}, + "scale_type": {"key": "scaleType", "type": "str"}, } _subtype_map = { - 'scale_type': {'Default': 'DefaultScaleSettings', 'TargetUtilization': 'TargetUtilizationScaleSettings'} + "scale_type": {"Default": "DefaultScaleSettings", "TargetUtilization": "TargetUtilizationScaleSettings"} } - def __init__( - self, - **kwargs - ): - """ - """ - super(OnlineScaleSettings, self).__init__(**kwargs) - self.scale_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.scale_type: Optional[str] = None class DefaultScaleSettings(OnlineScaleSettings): @@ -11006,30 +10800,26 @@ class DefaultScaleSettings(OnlineScaleSettings): All required parameters must be populated in order to send to Azure. - :ivar scale_type: Required. [Required] Type of deployment scaling algorithm.Constant filled by - server. Possible values include: "Default", "TargetUtilization". 
+ :ivar scale_type: [Required] Type of deployment scaling algorithm. Required. Known values are: + "Default" and "TargetUtilization". :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.ScaleType """ _validation = { - 'scale_type': {'required': True}, + "scale_type": {"required": True}, } _attribute_map = { - 'scale_type': {'key': 'scaleType', 'type': 'str'}, + "scale_type": {"key": "scaleType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(DefaultScaleSettings, self).__init__(**kwargs) - self.scale_type = 'Default' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.scale_type: str = "Default" -class DeploymentLogs(msrest.serialization.Model): +class DeploymentLogs(_serialization.Model): """DeploymentLogs. :ivar content: The retrieved online deployment logs. @@ -11037,58 +10827,53 @@ class DeploymentLogs(msrest.serialization.Model): """ _attribute_map = { - 'content': {'key': 'content', 'type': 'str'}, + "content": {"key": "content", "type": "str"}, } - def __init__( - self, - *, - content: Optional[str] = None, - **kwargs - ): + def __init__(self, *, content: Optional[str] = None, **kwargs: Any) -> None: """ :keyword content: The retrieved online deployment logs. :paramtype content: str """ - super(DeploymentLogs, self).__init__(**kwargs) + super().__init__(**kwargs) self.content = content -class DeploymentLogsRequest(msrest.serialization.Model): +class DeploymentLogsRequest(_serialization.Model): """DeploymentLogsRequest. - :ivar container_type: The type of container to retrieve logs from. Possible values include: - "StorageInitializer", "InferenceServer", "ModelDataCollector". + :ivar container_type: The type of container to retrieve logs from. Known values are: + "StorageInitializer", "InferenceServer", and "ModelDataCollector". :vartype container_type: str or ~azure.mgmt.machinelearningservices.models.ContainerType :ivar tail: The maximum number of lines to tail. :vartype tail: int """ _attribute_map = { - 'container_type': {'key': 'containerType', 'type': 'str'}, - 'tail': {'key': 'tail', 'type': 'int'}, + "container_type": {"key": "containerType", "type": "str"}, + "tail": {"key": "tail", "type": "int"}, } def __init__( self, *, - container_type: Optional[Union[str, "ContainerType"]] = None, + container_type: Optional[Union[str, "_models.ContainerType"]] = None, tail: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword container_type: The type of container to retrieve logs from. Possible values include: - "StorageInitializer", "InferenceServer", "ModelDataCollector". + :keyword container_type: The type of container to retrieve logs from. Known values are: + "StorageInitializer", "InferenceServer", and "ModelDataCollector". :paramtype container_type: str or ~azure.mgmt.machinelearningservices.models.ContainerType :keyword tail: The maximum number of lines to tail. :paramtype tail: int """ - super(DeploymentLogsRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.container_type = container_type self.tail = tail -class ResourceConfiguration(msrest.serialization.Model): +class ResourceConfiguration(_serialization.Model): """ResourceConfiguration. :ivar instance_count: Optional number of instances or nodes used by the compute target. @@ -11102,27 +10887,27 @@ class ResourceConfiguration(msrest.serialization.Model): For use with elastic training, currently supported by PyTorch distribution type only. 
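# Sketch for the regenerated DeploymentLogsRequest and DefaultScaleSettings
# models above (import path assumed from this patch's layout). Enum-typed
# fields accept either a plain string matching a known value or the
# corresponding _models.ContainerType member.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

logs_request = _models.DeploymentLogsRequest(
    container_type="InferenceServer",  # one of the known container types
    tail=100,                          # number of log lines to return
)
scale = _models.DefaultScaleSettings()
assert scale.scale_type == "Default"  # discriminator set by the constructor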
:vartype max_instance_count: int :ivar properties: Additional properties bag. - :vartype properties: dict[str, any] + :vartype properties: dict[str, JSON] """ _attribute_map = { - 'instance_count': {'key': 'instanceCount', 'type': 'int'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'locations': {'key': 'locations', 'type': '[str]'}, - 'max_instance_count': {'key': 'maxInstanceCount', 'type': 'int'}, - 'properties': {'key': 'properties', 'type': '{object}'}, + "instance_count": {"key": "instanceCount", "type": "int"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "locations": {"key": "locations", "type": "[str]"}, + "max_instance_count": {"key": "maxInstanceCount", "type": "int"}, + "properties": {"key": "properties", "type": "{object}"}, } def __init__( self, *, - instance_count: Optional[int] = 1, + instance_count: int = 1, instance_type: Optional[str] = None, locations: Optional[List[str]] = None, max_instance_count: Optional[int] = None, - properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + properties: Optional[Dict[str, JSON]] = None, + **kwargs: Any + ) -> None: """ :keyword instance_count: Optional number of instances or nodes used by the compute target. :paramtype instance_count: int @@ -11135,9 +10920,9 @@ def __init__( For use with elastic training, currently supported by PyTorch distribution type only. :paramtype max_instance_count: int :keyword properties: Additional properties bag. - :paramtype properties: dict[str, any] + :paramtype properties: dict[str, JSON] """ - super(ResourceConfiguration, self).__init__(**kwargs) + super().__init__(**kwargs) self.instance_count = instance_count self.instance_type = instance_type self.locations = locations @@ -11159,27 +10944,27 @@ class DeploymentResourceConfiguration(ResourceConfiguration): For use with elastic training, currently supported by PyTorch distribution type only. :vartype max_instance_count: int :ivar properties: Additional properties bag. - :vartype properties: dict[str, any] + :vartype properties: dict[str, JSON] """ _attribute_map = { - 'instance_count': {'key': 'instanceCount', 'type': 'int'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'locations': {'key': 'locations', 'type': '[str]'}, - 'max_instance_count': {'key': 'maxInstanceCount', 'type': 'int'}, - 'properties': {'key': 'properties', 'type': '{object}'}, + "instance_count": {"key": "instanceCount", "type": "int"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "locations": {"key": "locations", "type": "[str]"}, + "max_instance_count": {"key": "maxInstanceCount", "type": "int"}, + "properties": {"key": "properties", "type": "{object}"}, } def __init__( self, *, - instance_count: Optional[int] = 1, + instance_count: int = 1, instance_type: Optional[str] = None, locations: Optional[List[str]] = None, max_instance_count: Optional[int] = None, - properties: Optional[Dict[str, Any]] = None, - **kwargs - ): + properties: Optional[Dict[str, JSON]] = None, + **kwargs: Any + ) -> None: """ :keyword instance_count: Optional number of instances or nodes used by the compute target. :paramtype instance_count: int @@ -11192,12 +10977,19 @@ def __init__( For use with elastic training, currently supported by PyTorch distribution type only. :paramtype max_instance_count: int :keyword properties: Additional properties bag. 
- :paramtype properties: dict[str, any] + :paramtype properties: dict[str, JSON] """ - super(DeploymentResourceConfiguration, self).__init__(instance_count=instance_count, instance_type=instance_type, locations=locations, max_instance_count=max_instance_count, properties=properties, **kwargs) + super().__init__( + instance_count=instance_count, + instance_type=instance_type, + locations=locations, + max_instance_count=max_instance_count, + properties=properties, + **kwargs + ) -class DiagnoseRequestProperties(msrest.serialization.Model): +class DiagnoseRequestProperties(_serialization.Model): """DiagnoseRequestProperties. :ivar application_insights: Setting for diagnosing dependent application insights. @@ -11212,6 +11004,9 @@ class DiagnoseRequestProperties(msrest.serialization.Model): :vartype nsg: dict[str, any] :ivar others: Setting for diagnosing unclassified category of problems. :vartype others: dict[str, any] + :ivar required_resource_providers: Setting for diagnosing the presence of required resource + providers in the workspace. + :vartype required_resource_providers: dict[str, any] :ivar resource_lock: Setting for diagnosing resource lock. :vartype resource_lock: dict[str, any] :ivar storage_account: Setting for diagnosing dependent storage account. @@ -11221,15 +11016,16 @@ class DiagnoseRequestProperties(msrest.serialization.Model): """ _attribute_map = { - 'application_insights': {'key': 'applicationInsights', 'type': '{object}'}, - 'container_registry': {'key': 'containerRegistry', 'type': '{object}'}, - 'dns_resolution': {'key': 'dnsResolution', 'type': '{object}'}, - 'key_vault': {'key': 'keyVault', 'type': '{object}'}, - 'nsg': {'key': 'nsg', 'type': '{object}'}, - 'others': {'key': 'others', 'type': '{object}'}, - 'resource_lock': {'key': 'resourceLock', 'type': '{object}'}, - 'storage_account': {'key': 'storageAccount', 'type': '{object}'}, - 'udr': {'key': 'udr', 'type': '{object}'}, + "application_insights": {"key": "applicationInsights", "type": "{object}"}, + "container_registry": {"key": "containerRegistry", "type": "{object}"}, + "dns_resolution": {"key": "dnsResolution", "type": "{object}"}, + "key_vault": {"key": "keyVault", "type": "{object}"}, + "nsg": {"key": "nsg", "type": "{object}"}, + "others": {"key": "others", "type": "{object}"}, + "required_resource_providers": {"key": "requiredResourceProviders", "type": "{object}"}, + "resource_lock": {"key": "resourceLock", "type": "{object}"}, + "storage_account": {"key": "storageAccount", "type": "{object}"}, + "udr": {"key": "udr", "type": "{object}"}, } def __init__( @@ -11241,11 +11037,12 @@ def __init__( key_vault: Optional[Dict[str, Any]] = None, nsg: Optional[Dict[str, Any]] = None, others: Optional[Dict[str, Any]] = None, + required_resource_providers: Optional[Dict[str, Any]] = None, resource_lock: Optional[Dict[str, Any]] = None, storage_account: Optional[Dict[str, Any]] = None, udr: Optional[Dict[str, Any]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword application_insights: Setting for diagnosing dependent application insights. :paramtype application_insights: dict[str, any] @@ -11259,6 +11056,9 @@ def __init__( :paramtype nsg: dict[str, any] :keyword others: Setting for diagnosing unclassified category of problems. :paramtype others: dict[str, any] + :keyword required_resource_providers: Setting for diagnosing the presence of required resource + providers in the workspace. 
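# Sketch for the regenerated DeploymentResourceConfiguration above: instance_count
# now defaults to a plain int of 1 (no longer Optional) and the properties bag is
# typed as JSON values. The VM SKU shown is an illustrative assumption.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

resources = _models.DeploymentResourceConfiguration(
    instance_count=2,
    instance_type="Standard_DS3_v2",      # hypothetical VM SKU
    properties={"example-flag": True},    # arbitrary JSON-serializable values
)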
+ :paramtype required_resource_providers: dict[str, any] :keyword resource_lock: Setting for diagnosing resource lock. :paramtype resource_lock: dict[str, any] :keyword storage_account: Setting for diagnosing dependent storage account. @@ -11266,19 +11066,20 @@ def __init__( :keyword udr: Setting for diagnosing user defined routing. :paramtype udr: dict[str, any] """ - super(DiagnoseRequestProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.application_insights = application_insights self.container_registry = container_registry self.dns_resolution = dns_resolution self.key_vault = key_vault self.nsg = nsg self.others = others + self.required_resource_providers = required_resource_providers self.resource_lock = resource_lock self.storage_account = storage_account self.udr = udr -class DiagnoseResponseResult(msrest.serialization.Model): +class DiagnoseResponseResult(_serialization.Model): """DiagnoseResponseResult. :ivar value: @@ -11286,24 +11087,19 @@ class DiagnoseResponseResult(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': 'DiagnoseResponseResultValue'}, + "value": {"key": "value", "type": "DiagnoseResponseResultValue"}, } - def __init__( - self, - *, - value: Optional["DiagnoseResponseResultValue"] = None, - **kwargs - ): + def __init__(self, *, value: Optional["_models.DiagnoseResponseResultValue"] = None, **kwargs: Any) -> None: """ :keyword value: :paramtype value: ~azure.mgmt.machinelearningservices.models.DiagnoseResponseResultValue """ - super(DiagnoseResponseResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value -class DiagnoseResponseResultValue(msrest.serialization.Model): +class DiagnoseResponseResultValue(_serialization.Model): """DiagnoseResponseResultValue. 
:ivar user_defined_route_results: @@ -11333,31 +11129,31 @@ class DiagnoseResponseResultValue(msrest.serialization.Model): """ _attribute_map = { - 'user_defined_route_results': {'key': 'userDefinedRouteResults', 'type': '[DiagnoseResult]'}, - 'network_security_rule_results': {'key': 'networkSecurityRuleResults', 'type': '[DiagnoseResult]'}, - 'resource_lock_results': {'key': 'resourceLockResults', 'type': '[DiagnoseResult]'}, - 'dns_resolution_results': {'key': 'dnsResolutionResults', 'type': '[DiagnoseResult]'}, - 'storage_account_results': {'key': 'storageAccountResults', 'type': '[DiagnoseResult]'}, - 'key_vault_results': {'key': 'keyVaultResults', 'type': '[DiagnoseResult]'}, - 'container_registry_results': {'key': 'containerRegistryResults', 'type': '[DiagnoseResult]'}, - 'application_insights_results': {'key': 'applicationInsightsResults', 'type': '[DiagnoseResult]'}, - 'other_results': {'key': 'otherResults', 'type': '[DiagnoseResult]'}, + "user_defined_route_results": {"key": "userDefinedRouteResults", "type": "[DiagnoseResult]"}, + "network_security_rule_results": {"key": "networkSecurityRuleResults", "type": "[DiagnoseResult]"}, + "resource_lock_results": {"key": "resourceLockResults", "type": "[DiagnoseResult]"}, + "dns_resolution_results": {"key": "dnsResolutionResults", "type": "[DiagnoseResult]"}, + "storage_account_results": {"key": "storageAccountResults", "type": "[DiagnoseResult]"}, + "key_vault_results": {"key": "keyVaultResults", "type": "[DiagnoseResult]"}, + "container_registry_results": {"key": "containerRegistryResults", "type": "[DiagnoseResult]"}, + "application_insights_results": {"key": "applicationInsightsResults", "type": "[DiagnoseResult]"}, + "other_results": {"key": "otherResults", "type": "[DiagnoseResult]"}, } def __init__( self, *, - user_defined_route_results: Optional[List["DiagnoseResult"]] = None, - network_security_rule_results: Optional[List["DiagnoseResult"]] = None, - resource_lock_results: Optional[List["DiagnoseResult"]] = None, - dns_resolution_results: Optional[List["DiagnoseResult"]] = None, - storage_account_results: Optional[List["DiagnoseResult"]] = None, - key_vault_results: Optional[List["DiagnoseResult"]] = None, - container_registry_results: Optional[List["DiagnoseResult"]] = None, - application_insights_results: Optional[List["DiagnoseResult"]] = None, - other_results: Optional[List["DiagnoseResult"]] = None, - **kwargs - ): + user_defined_route_results: Optional[List["_models.DiagnoseResult"]] = None, + network_security_rule_results: Optional[List["_models.DiagnoseResult"]] = None, + resource_lock_results: Optional[List["_models.DiagnoseResult"]] = None, + dns_resolution_results: Optional[List["_models.DiagnoseResult"]] = None, + storage_account_results: Optional[List["_models.DiagnoseResult"]] = None, + key_vault_results: Optional[List["_models.DiagnoseResult"]] = None, + container_registry_results: Optional[List["_models.DiagnoseResult"]] = None, + application_insights_results: Optional[List["_models.DiagnoseResult"]] = None, + other_results: Optional[List["_models.DiagnoseResult"]] = None, + **kwargs: Any + ) -> None: """ :keyword user_defined_route_results: :paramtype user_defined_route_results: @@ -11385,7 +11181,7 @@ def __init__( :keyword other_results: :paramtype other_results: list[~azure.mgmt.machinelearningservices.models.DiagnoseResult] """ - super(DiagnoseResponseResultValue, self).__init__(**kwargs) + super().__init__(**kwargs) self.user_defined_route_results = user_defined_route_results 
self.network_security_rule_results = network_security_rule_results self.resource_lock_results = resource_lock_results @@ -11397,14 +11193,14 @@ def __init__( self.other_results = other_results -class DiagnoseResult(msrest.serialization.Model): +class DiagnoseResult(_serialization.Model): """Result of Diagnose. Variables are only populated by the server, and will be ignored when sending a request. :ivar code: Code for workspace setup error. :vartype code: str - :ivar level: Level of workspace setup error. Possible values include: "Warning", "Error", + :ivar level: Level of workspace setup error. Known values are: "Warning", "Error", and "Information". :vartype level: str or ~azure.mgmt.machinelearningservices.models.DiagnoseResultLevel :ivar message: Message of workspace setup error. @@ -11412,30 +11208,26 @@ class DiagnoseResult(msrest.serialization.Model): """ _validation = { - 'code': {'readonly': True}, - 'level': {'readonly': True}, - 'message': {'readonly': True}, + "code": {"readonly": True}, + "level": {"readonly": True}, + "message": {"readonly": True}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'level': {'key': 'level', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, + "code": {"key": "code", "type": "str"}, + "level": {"key": "level", "type": "str"}, + "message": {"key": "message", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(DiagnoseResult, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.code = None self.level = None self.message = None -class DiagnoseWorkspaceParameters(msrest.serialization.Model): +class DiagnoseWorkspaceParameters(_serialization.Model): """Parameters to diagnose a workspace. :ivar value: @@ -11443,60 +11235,48 @@ class DiagnoseWorkspaceParameters(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': 'DiagnoseRequestProperties'}, + "value": {"key": "value", "type": "DiagnoseRequestProperties"}, } - def __init__( - self, - *, - value: Optional["DiagnoseRequestProperties"] = None, - **kwargs - ): + def __init__(self, *, value: Optional["_models.DiagnoseRequestProperties"] = None, **kwargs: Any) -> None: """ :keyword value: :paramtype value: ~azure.mgmt.machinelearningservices.models.DiagnoseRequestProperties """ - super(DiagnoseWorkspaceParameters, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value -class DistributionConfiguration(msrest.serialization.Model): +class DistributionConfiguration(_serialization.Model): """Base definition for job distribution configuration. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: Mpi, PyTorch, Ray, TensorFlow. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + Mpi, PyTorch, Ray, TensorFlow All required parameters must be populated in order to send to Azure. - :ivar distribution_type: Required. [Required] Specifies the type of distribution - framework.Constant filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi", - "Ray". + :ivar distribution_type: [Required] Specifies the type of distribution framework. Required. + Known values are: "PyTorch", "TensorFlow", "Mpi", and "Ray". 
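# Editor's sketch (not generated code): how the regenerated diagnose models above might be
# composed by a caller. Assumes the classes are re-exported from the version's models
# subpackage, as autorest conventionally does; the per-check dicts are free-form placeholders.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

diagnose_request = _models.DiagnoseRequestProperties(
    key_vault={},                    # dict[str, any]; an empty dict requests the default check
    nsg={},
    required_resource_providers={},  # assignment newly added in this regeneration
)
diagnose_parameters = _models.DiagnoseWorkspaceParameters(value=diagnose_request)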
:vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType """ _validation = { - 'distribution_type': {'required': True}, + "distribution_type": {"required": True}, } _attribute_map = { - 'distribution_type': {'key': 'distributionType', 'type': 'str'}, + "distribution_type": {"key": "distributionType", "type": "str"}, } - _subtype_map = { - 'distribution_type': {'Mpi': 'Mpi', 'PyTorch': 'PyTorch', 'Ray': 'Ray', 'TensorFlow': 'TensorFlow'} - } + _subtype_map = {"distribution_type": {"Mpi": "Mpi", "PyTorch": "PyTorch", "Ray": "Ray", "TensorFlow": "TensorFlow"}} - def __init__( - self, - **kwargs - ): - """ - """ - super(DistributionConfiguration, self).__init__(**kwargs) - self.distribution_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.distribution_type: Optional[str] = None -class Docker(msrest.serialization.Model): +class Docker(_serialization.Model): """Docker. :ivar additional_properties: Unmatched properties from the message are deserialized to this @@ -11507,8 +11287,8 @@ class Docker(msrest.serialization.Model): """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'privileged': {'key': 'privileged', 'type': 'bool'}, + "additional_properties": {"key": "", "type": "{object}"}, + "privileged": {"key": "privileged", "type": "bool"}, } def __init__( @@ -11516,8 +11296,8 @@ def __init__( *, additional_properties: Optional[Dict[str, Any]] = None, privileged: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. @@ -11525,7 +11305,7 @@ def __init__( :keyword privileged: Indicate whether container shall run in privileged or non-privileged mode. :paramtype privileged: bool """ - super(Docker, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.privileged = privileged @@ -11535,8 +11315,8 @@ class EmailMonitoringAlertNotificationSettings(MonitoringAlertNotificationSettin All required parameters must be populated in order to send to Azure. - :ivar alert_notification_type: Required. [Required] Specifies the type of signal to - monitor.Constant filled by server. Possible values include: "AzureMonitor", "Email". + :ivar alert_notification_type: [Required] Specifies the type of signal to monitor. Required. + Known values are: "AzureMonitor" and "Email". :vartype alert_notification_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationType :ivar email_notification_setting: Configuration for notification. 
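# Editor's sketch (not generated code): the Docker model above now uses double-quoted
# attribute maps and typed kwargs; construction is unchanged. Values are placeholders.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

docker_settings = _models.Docker(
    privileged=False,                        # run the container in non-privileged mode
    additional_properties={"customKey": 1},  # unmatched payload keys are kept in this dict
)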
@@ -11545,31 +11325,28 @@ class EmailMonitoringAlertNotificationSettings(MonitoringAlertNotificationSettin """ _validation = { - 'alert_notification_type': {'required': True}, + "alert_notification_type": {"required": True}, } _attribute_map = { - 'alert_notification_type': {'key': 'alertNotificationType', 'type': 'str'}, - 'email_notification_setting': {'key': 'emailNotificationSetting', 'type': 'NotificationSetting'}, + "alert_notification_type": {"key": "alertNotificationType", "type": "str"}, + "email_notification_setting": {"key": "emailNotificationSetting", "type": "NotificationSetting"}, } def __init__( - self, - *, - email_notification_setting: Optional["NotificationSetting"] = None, - **kwargs - ): + self, *, email_notification_setting: Optional["_models.NotificationSetting"] = None, **kwargs: Any + ) -> None: """ :keyword email_notification_setting: Configuration for notification. :paramtype email_notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting """ - super(EmailMonitoringAlertNotificationSettings, self).__init__(**kwargs) - self.alert_notification_type = 'Email' # type: str + super().__init__(**kwargs) + self.alert_notification_type: str = "Email" self.email_notification_setting = email_notification_setting -class EncryptionKeyVaultUpdateProperties(msrest.serialization.Model): +class EncryptionKeyVaultUpdateProperties(_serialization.Model): """EncryptionKeyVaultUpdateProperties. All required parameters must be populated in order to send to Azure. @@ -11579,28 +11356,23 @@ class EncryptionKeyVaultUpdateProperties(msrest.serialization.Model): """ _validation = { - 'key_identifier': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "key_identifier": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, + "key_identifier": {"key": "keyIdentifier", "type": "str"}, } - def __init__( - self, - *, - key_identifier: str, - **kwargs - ): + def __init__(self, *, key_identifier: str, **kwargs: Any) -> None: """ :keyword key_identifier: Required. :paramtype key_identifier: str """ - super(EncryptionKeyVaultUpdateProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.key_identifier = key_identifier -class EncryptionProperty(msrest.serialization.Model): +class EncryptionProperty(_serialization.Model): """EncryptionProperty. All required parameters must be populated in order to send to Azure. @@ -11611,14 +11383,14 @@ class EncryptionProperty(msrest.serialization.Model): :vartype cosmos_db_resource_id: str :ivar identity: Identity to be used with the keyVault. :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityForCmk - :ivar key_vault_properties: Required. KeyVault details to do the encryption. + :ivar key_vault_properties: KeyVault details to do the encryption. Required. :vartype key_vault_properties: ~azure.mgmt.machinelearningservices.models.KeyVaultProperties :ivar search_account_resource_id: The byok search account that customer brings to store customer's data with encryption. :vartype search_account_resource_id: str - :ivar status: Required. Indicates whether or not the encryption is enabled for the workspace. - Possible values include: "Enabled", "Disabled". + :ivar status: Indicates whether or not the encryption is enabled for the workspace. Required. + Known values are: "Enabled" and "Disabled". 
:vartype status: str or ~azure.mgmt.machinelearningservices.models.EncryptionStatus :ivar storage_account_resource_id: The byok storage account that customer brings to store customer's data @@ -11627,30 +11399,30 @@ class EncryptionProperty(msrest.serialization.Model): """ _validation = { - 'key_vault_properties': {'required': True}, - 'status': {'required': True}, + "key_vault_properties": {"required": True}, + "status": {"required": True}, } _attribute_map = { - 'cosmos_db_resource_id': {'key': 'cosmosDbResourceId', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityForCmk'}, - 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'KeyVaultProperties'}, - 'search_account_resource_id': {'key': 'searchAccountResourceId', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'storage_account_resource_id': {'key': 'storageAccountResourceId', 'type': 'str'}, + "cosmos_db_resource_id": {"key": "cosmosDbResourceId", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityForCmk"}, + "key_vault_properties": {"key": "keyVaultProperties", "type": "KeyVaultProperties"}, + "search_account_resource_id": {"key": "searchAccountResourceId", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "storage_account_resource_id": {"key": "storageAccountResourceId", "type": "str"}, } def __init__( self, *, - key_vault_properties: "KeyVaultProperties", - status: Union[str, "EncryptionStatus"], + key_vault_properties: "_models.KeyVaultProperties", + status: Union[str, "_models.EncryptionStatus"], cosmos_db_resource_id: Optional[str] = None, - identity: Optional["IdentityForCmk"] = None, + identity: Optional["_models.IdentityForCmk"] = None, search_account_resource_id: Optional[str] = None, storage_account_resource_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword cosmos_db_resource_id: The byok cosmosdb account that customer brings to store customer's data @@ -11658,21 +11430,21 @@ def __init__( :paramtype cosmos_db_resource_id: str :keyword identity: Identity to be used with the keyVault. :paramtype identity: ~azure.mgmt.machinelearningservices.models.IdentityForCmk - :keyword key_vault_properties: Required. KeyVault details to do the encryption. + :keyword key_vault_properties: KeyVault details to do the encryption. Required. :paramtype key_vault_properties: ~azure.mgmt.machinelearningservices.models.KeyVaultProperties :keyword search_account_resource_id: The byok search account that customer brings to store customer's data with encryption. :paramtype search_account_resource_id: str - :keyword status: Required. Indicates whether or not the encryption is enabled for the - workspace. Possible values include: "Enabled", "Disabled". + :keyword status: Indicates whether or not the encryption is enabled for the workspace. + Required. Known values are: "Enabled" and "Disabled". :paramtype status: str or ~azure.mgmt.machinelearningservices.models.EncryptionStatus :keyword storage_account_resource_id: The byok storage account that customer brings to store customer's data with encryption. 
:paramtype storage_account_resource_id: str """ - super(EncryptionProperty, self).__init__(**kwargs) + super().__init__(**kwargs) self.cosmos_db_resource_id = cosmos_db_resource_id self.identity = identity self.key_vault_properties = key_vault_properties @@ -11681,7 +11453,7 @@ def __init__( self.storage_account_resource_id = storage_account_resource_id -class EncryptionUpdateProperties(msrest.serialization.Model): +class EncryptionUpdateProperties(_serialization.Model): """EncryptionUpdateProperties. All required parameters must be populated in order to send to Azure. @@ -11692,33 +11464,28 @@ class EncryptionUpdateProperties(msrest.serialization.Model): """ _validation = { - 'key_vault_properties': {'required': True}, + "key_vault_properties": {"required": True}, } _attribute_map = { - 'key_vault_properties': {'key': 'keyVaultProperties', 'type': 'EncryptionKeyVaultUpdateProperties'}, + "key_vault_properties": {"key": "keyVaultProperties", "type": "EncryptionKeyVaultUpdateProperties"}, } - def __init__( - self, - *, - key_vault_properties: "EncryptionKeyVaultUpdateProperties", - **kwargs - ): + def __init__(self, *, key_vault_properties: "_models.EncryptionKeyVaultUpdateProperties", **kwargs: Any) -> None: """ :keyword key_vault_properties: Required. :paramtype key_vault_properties: ~azure.mgmt.machinelearningservices.models.EncryptionKeyVaultUpdateProperties """ - super(EncryptionUpdateProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.key_vault_properties = key_vault_properties -class Endpoint(msrest.serialization.Model): +class Endpoint(_serialization.Model): """Endpoint. - :ivar protocol: Protocol over which communication will happen over this endpoint. Possible - values include: "tcp", "udp", "http". Default value: "tcp". + :ivar protocol: Protocol over which communication will happen over this endpoint. Known values + are: "tcp", "udp", and "http". :vartype protocol: str or ~azure.mgmt.machinelearningservices.models.Protocol :ivar name: Name of the Endpoint. :vartype name: str @@ -11731,26 +11498,26 @@ class Endpoint(msrest.serialization.Model): """ _attribute_map = { - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'int'}, - 'published': {'key': 'published', 'type': 'int'}, - 'host_ip': {'key': 'hostIp', 'type': 'str'}, + "protocol": {"key": "protocol", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "target": {"key": "target", "type": "int"}, + "published": {"key": "published", "type": "int"}, + "host_ip": {"key": "hostIp", "type": "str"}, } def __init__( self, *, - protocol: Optional[Union[str, "Protocol"]] = "tcp", + protocol: Union[str, "_models.Protocol"] = "tcp", name: Optional[str] = None, target: Optional[int] = None, published: Optional[int] = None, host_ip: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword protocol: Protocol over which communication will happen over this endpoint. Possible - values include: "tcp", "udp", "http". Default value: "tcp". + :keyword protocol: Protocol over which communication will happen over this endpoint. Known + values are: "tcp", "udp", and "http". :paramtype protocol: str or ~azure.mgmt.machinelearningservices.models.Protocol :keyword name: Name of the Endpoint. :paramtype name: str @@ -11761,7 +11528,7 @@ def __init__( :keyword host_ip: Host IP over which the application is exposed from the container. 
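# Editor's sketch (not generated code): customer-managed-key settings with the regenerated
# EncryptionProperty. Enum values may be passed as plain strings. The KeyVaultProperties
# field names (key_vault_arm_id, key_identifier) are assumptions drawn from the wider API
# surface, not from this hunk; the resource ids are placeholders.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

encryption = _models.EncryptionProperty(
    status="Enabled",  # EncryptionStatus: "Enabled" or "Disabled"
    key_vault_properties=_models.KeyVaultProperties(  # assumed field names below
        key_vault_arm_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<kv>",
        key_identifier="https://<kv>.vault.azure.net/keys/<key>/<version>",
    ),
)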
:paramtype host_ip: str """ - super(Endpoint, self).__init__(**kwargs) + super().__init__(**kwargs) self.protocol = protocol self.name = name self.target = target @@ -11769,7 +11536,7 @@ def __init__( self.host_ip = host_ip -class EndpointAuthKeys(msrest.serialization.Model): +class EndpointAuthKeys(_serialization.Model): """Keys for endpoint authentication. :ivar primary_key: The primary key. @@ -11779,68 +11546,64 @@ class EndpointAuthKeys(msrest.serialization.Model): """ _attribute_map = { - 'primary_key': {'key': 'primaryKey', 'type': 'str'}, - 'secondary_key': {'key': 'secondaryKey', 'type': 'str'}, + "primary_key": {"key": "primaryKey", "type": "str"}, + "secondary_key": {"key": "secondaryKey", "type": "str"}, } def __init__( - self, - *, - primary_key: Optional[str] = None, - secondary_key: Optional[str] = None, - **kwargs - ): + self, *, primary_key: Optional[str] = None, secondary_key: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword primary_key: The primary key. :paramtype primary_key: str :keyword secondary_key: The secondary key. :paramtype secondary_key: str """ - super(EndpointAuthKeys, self).__init__(**kwargs) + super().__init__(**kwargs) self.primary_key = primary_key self.secondary_key = secondary_key -class EndpointAuthToken(msrest.serialization.Model): +class EndpointAuthToken(_serialization.Model): """Service Token. :ivar access_token: Access token for endpoint authentication. :vartype access_token: str :ivar expiry_time_utc: Access token expiry time (UTC). - :vartype expiry_time_utc: long + :vartype expiry_time_utc: int :ivar refresh_after_time_utc: Refresh access token after time (UTC). - :vartype refresh_after_time_utc: long + :vartype refresh_after_time_utc: int :ivar token_type: Access token type. :vartype token_type: str """ _attribute_map = { - 'access_token': {'key': 'accessToken', 'type': 'str'}, - 'expiry_time_utc': {'key': 'expiryTimeUtc', 'type': 'long'}, - 'refresh_after_time_utc': {'key': 'refreshAfterTimeUtc', 'type': 'long'}, - 'token_type': {'key': 'tokenType', 'type': 'str'}, + "access_token": {"key": "accessToken", "type": "str"}, + "expiry_time_utc": {"key": "expiryTimeUtc", "type": "int"}, + "refresh_after_time_utc": {"key": "refreshAfterTimeUtc", "type": "int"}, + "token_type": {"key": "tokenType", "type": "str"}, } def __init__( self, *, access_token: Optional[str] = None, - expiry_time_utc: Optional[int] = 0, - refresh_after_time_utc: Optional[int] = 0, + expiry_time_utc: int = 0, + refresh_after_time_utc: int = 0, token_type: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword access_token: Access token for endpoint authentication. :paramtype access_token: str :keyword expiry_time_utc: Access token expiry time (UTC). - :paramtype expiry_time_utc: long + :paramtype expiry_time_utc: int :keyword refresh_after_time_utc: Refresh access token after time (UTC). - :paramtype refresh_after_time_utc: long + :paramtype refresh_after_time_utc: int :keyword token_type: Access token type. :paramtype token_type: str """ - super(EndpointAuthToken, self).__init__(**kwargs) + super().__init__(**kwargs) self.access_token = access_token self.expiry_time_utc = expiry_time_utc self.refresh_after_time_utc = refresh_after_time_utc @@ -11852,48 +11615,40 @@ class EndpointScheduleAction(ScheduleActionBase): All required parameters must be populated in order to send to Azure. - :ivar action_type: Required. [Required] Specifies the action type of the schedule.Constant - filled by server. 
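# Editor's sketch (not generated code): the container Endpoint and endpoint-auth models
# defined above; note EndpointAuthToken's expiry fields are now typed int rather than long.
# All values are placeholders.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

app_endpoint = _models.Endpoint(protocol="http", name="app", target=8888, published=44224)
auth_keys = _models.EndpointAuthKeys(primary_key="<primary>", secondary_key="<secondary>")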
Possible values include: "CreateJob", "InvokeBatchEndpoint", "ImportData", - "CreateMonitor". + :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values + are: "CreateJob", "InvokeBatchEndpoint", "ImportData", and "CreateMonitor". :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType - :ivar endpoint_invocation_definition: Required. [Required] Defines Schedule action definition - details. - - - .. raw:: html - - . - :vartype endpoint_invocation_definition: any - """ + :ivar endpoint_invocation_definition: [Required] Defines Schedule action definition details. + + + .. raw:: html + + . Required. + :vartype endpoint_invocation_definition: JSON + """ _validation = { - 'action_type': {'required': True}, - 'endpoint_invocation_definition': {'required': True}, + "action_type": {"required": True}, + "endpoint_invocation_definition": {"required": True}, } _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, - 'endpoint_invocation_definition': {'key': 'endpointInvocationDefinition', 'type': 'object'}, + "action_type": {"key": "actionType", "type": "str"}, + "endpoint_invocation_definition": {"key": "endpointInvocationDefinition", "type": "object"}, } - def __init__( - self, - *, - endpoint_invocation_definition: Any, - **kwargs - ): + def __init__(self, *, endpoint_invocation_definition: JSON, **kwargs: Any) -> None: """ - :keyword endpoint_invocation_definition: Required. [Required] Defines Schedule action - definition details. - - + :keyword endpoint_invocation_definition: [Required] Defines Schedule action definition details. + + .. raw:: html - - . - :paramtype endpoint_invocation_definition: any + + . Required. + :paramtype endpoint_invocation_definition: JSON """ - super(EndpointScheduleAction, self).__init__(**kwargs) - self.action_type = 'InvokeBatchEndpoint' # type: str + super().__init__(**kwargs) + self.action_type: str = "InvokeBatchEndpoint" self.endpoint_invocation_definition = endpoint_invocation_definition @@ -11915,38 +11670,33 @@ class EnvironmentContainer(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. 
:vartype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentContainerProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'EnvironmentContainerProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "EnvironmentContainerProperties"}, } - def __init__( - self, - *, - properties: "EnvironmentContainerProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.EnvironmentContainerProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentContainerProperties """ - super(EnvironmentContainer, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties @@ -11959,7 +11709,7 @@ class EnvironmentContainerProperties(AssetContainer): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar is_archived: Is the asset archived?. :vartype is_archived: bool @@ -11967,26 +11717,26 @@ class EnvironmentContainerProperties(AssetContainer): :vartype latest_version: str :ivar next_version: The next auto incremental version. :vartype next_version: str - :ivar provisioning_state: Provisioning state for the environment container. Possible values - include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". + :ivar provisioning_state: Provisioning state for the environment container. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". 
:vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.AssetProvisioningState """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } def __init__( @@ -11995,24 +11745,24 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - is_archived: Optional[bool] = False, - **kwargs - ): + is_archived: bool = False, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword is_archived: Is the asset archived?. :paramtype is_archived: bool """ - super(EnvironmentContainerProperties, self).__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) self.provisioning_state = None -class EnvironmentContainerResourceArmPaginatedResult(msrest.serialization.Model): +class EnvironmentContainerResourceArmPaginatedResult(_serialization.Model): """A paginated list of EnvironmentContainer entities. :ivar next_link: The link to the next page of EnvironmentContainer objects. If null, there are @@ -12023,17 +11773,17 @@ class EnvironmentContainerResourceArmPaginatedResult(msrest.serialization.Model) """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[EnvironmentContainer]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[EnvironmentContainer]"}, } def __init__( self, *, next_link: Optional[str] = None, - value: Optional[List["EnvironmentContainer"]] = None, - **kwargs - ): + value: Optional[List["_models.EnvironmentContainer"]] = None, + **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of EnvironmentContainer objects. If null, there are no additional pages. @@ -12041,49 +11791,49 @@ def __init__( :keyword value: An array of objects of type EnvironmentContainer. 
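# Editor's sketch (not generated code): an EnvironmentContainer envelope built from the
# regenerated properties model; only fields shown in this hunk are used.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

env_container = _models.EnvironmentContainer(
    properties=_models.EnvironmentContainerProperties(
        description="Training image lineage",
        tags={"team": "ml-platform"},
        is_archived=False,  # plain bool default now, no longer Optional
    )
)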
:paramtype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] """ - super(EnvironmentContainerResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class EnvironmentVariable(msrest.serialization.Model): +class EnvironmentVariable(_serialization.Model): """EnvironmentVariable. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. :vartype additional_properties: dict[str, any] :ivar type: Type of the Environment Variable. Possible values are: local - For local variable. - Possible values include: "local". Default value: "local". + "local" :vartype type: str or ~azure.mgmt.machinelearningservices.models.EnvironmentVariableType :ivar value: Value of the Environment variable. :vartype value: str """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "value": {"key": "value", "type": "str"}, } def __init__( self, *, additional_properties: Optional[Dict[str, Any]] = None, - type: Optional[Union[str, "EnvironmentVariableType"]] = "local", + type: Union[str, "_models.EnvironmentVariableType"] = "local", value: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, any] :keyword type: Type of the Environment Variable. Possible values are: local - For local - variable. Possible values include: "local". Default value: "local". + variable. "local" :paramtype type: str or ~azure.mgmt.machinelearningservices.models.EnvironmentVariableType :keyword value: Value of the Environment variable. :paramtype value: str """ - super(EnvironmentVariable, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.type = type self.value = value @@ -12107,41 +11857,36 @@ class EnvironmentVersion(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. 
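# Editor's sketch (not generated code): EnvironmentVariable keeps "local" as its default
# type, now declared as a plain default rather than Optional.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

env_var = _models.EnvironmentVariable(type="local", value="1")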
:vartype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentVersionProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'EnvironmentVersionProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "EnvironmentVersionProperties"}, } - def __init__( - self, - *, - properties: "EnvironmentVersionProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.EnvironmentVersionProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.EnvironmentVersionProperties """ - super(EnvironmentVersion, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class EnvironmentVersionProperties(AssetBase): +class EnvironmentVersionProperties(AssetBase): # pylint: disable=too-many-instance-attributes """Environment version details. Variables are only populated by the server, and will be ignored when sending a request. @@ -12150,7 +11895,7 @@ class EnvironmentVersionProperties(AssetBase): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -12160,36 +11905,36 @@ class EnvironmentVersionProperties(AssetBase): :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is provided it will be used to populate IsArchived. :vartype is_archived: bool - :ivar auto_rebuild: Defines if image needs to be rebuilt based on base image changes. Possible - values include: "Disabled", "OnBaseImageUpdate". + :ivar auto_rebuild: Defines if image needs to be rebuilt based on base image changes. Known + values are: "Disabled" and "OnBaseImageUpdate". :vartype auto_rebuild: str or ~azure.mgmt.machinelearningservices.models.AutoRebuildSetting :ivar build: Configuration settings for Docker build context. :vartype build: ~azure.mgmt.machinelearningservices.models.BuildContext :ivar conda_file: Standard configuration file used by Conda that lets you install any kind of package, including Python, R, and C/C++ packages. - - + + .. raw:: html - + . :vartype conda_file: str :ivar environment_type: Environment type is either user managed or curated by the Azure ML service - - + + .. raw:: html - + . Possible values include: "Curated", "UserCreated". + />. Known values are: "Curated" and "UserCreated". 
:vartype environment_type: str or ~azure.mgmt.machinelearningservices.models.EnvironmentType :ivar image: Name of the image that will be used for the environment. - - + + .. raw:: html - + . @@ -12200,10 +11945,10 @@ class EnvironmentVersionProperties(AssetBase): :ivar intellectual_property: Intellectual Property details. Used if environment is an Intellectual Property. :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :ivar os_type: The OS type of the environment. Possible values include: "Linux", "Windows". + :ivar os_type: The OS type of the environment. Known values are: "Linux" and "Windows". :vartype os_type: str or ~azure.mgmt.machinelearningservices.models.OperatingSystemType - :ivar provisioning_state: Provisioning state for the environment version. Possible values - include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". + :ivar provisioning_state: Provisioning state for the environment version. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.AssetProvisioningState :ivar stage: Stage in the environment lifecycle assigned to this environment. @@ -12211,27 +11956,27 @@ class EnvironmentVersionProperties(AssetBase): """ _validation = { - 'environment_type': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "environment_type": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'auto_rebuild': {'key': 'autoRebuild', 'type': 'str'}, - 'build': {'key': 'build', 'type': 'BuildContext'}, - 'conda_file': {'key': 'condaFile', 'type': 'str'}, - 'environment_type': {'key': 'environmentType', 'type': 'str'}, - 'image': {'key': 'image', 'type': 'str'}, - 'inference_config': {'key': 'inferenceConfig', 'type': 'InferenceContainerProperties'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'os_type': {'key': 'osType', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "auto_rebuild": {"key": "autoRebuild", "type": "str"}, + "build": {"key": "build", "type": "BuildContext"}, + "conda_file": {"key": "condaFile", "type": "str"}, + "environment_type": {"key": "environmentType", "type": "str"}, + "image": {"key": "image", "type": "str"}, + "inference_config": {"key": "inferenceConfig", "type": "InferenceContainerProperties"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "os_type": {"key": "osType", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "stage": {"key": "stage", "type": "str"}, } def __init__( @@ -12240,25 +11985,25 @@ def __init__( 
description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - is_anonymous: Optional[bool] = False, - is_archived: Optional[bool] = False, - auto_rebuild: Optional[Union[str, "AutoRebuildSetting"]] = None, - build: Optional["BuildContext"] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + auto_rebuild: Optional[Union[str, "_models.AutoRebuildSetting"]] = None, + build: Optional["_models.BuildContext"] = None, conda_file: Optional[str] = None, image: Optional[str] = None, - inference_config: Optional["InferenceContainerProperties"] = None, - intellectual_property: Optional["IntellectualProperty"] = None, - os_type: Optional[Union[str, "OperatingSystemType"]] = None, + inference_config: Optional["_models.InferenceContainerProperties"] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, + os_type: Optional[Union[str, "_models.OperatingSystemType"]] = None, stage: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -12268,26 +12013,26 @@ def __init__( :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is provided it will be used to populate IsArchived. :paramtype is_archived: bool - :keyword auto_rebuild: Defines if image needs to be rebuilt based on base image changes. - Possible values include: "Disabled", "OnBaseImageUpdate". + :keyword auto_rebuild: Defines if image needs to be rebuilt based on base image changes. Known + values are: "Disabled" and "OnBaseImageUpdate". :paramtype auto_rebuild: str or ~azure.mgmt.machinelearningservices.models.AutoRebuildSetting :keyword build: Configuration settings for Docker build context. :paramtype build: ~azure.mgmt.machinelearningservices.models.BuildContext :keyword conda_file: Standard configuration file used by Conda that lets you install any kind of package, including Python, R, and C/C++ packages. - - + + .. raw:: html - + . :paramtype conda_file: str :keyword image: Name of the image that will be used for the environment. - - + + .. raw:: html - + . @@ -12299,12 +12044,20 @@ def __init__( Intellectual Property. :paramtype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword os_type: The OS type of the environment. Possible values include: "Linux", "Windows". + :keyword os_type: The OS type of the environment. Known values are: "Linux" and "Windows". :paramtype os_type: str or ~azure.mgmt.machinelearningservices.models.OperatingSystemType :keyword stage: Stage in the environment lifecycle assigned to this environment. 
:paramtype stage: str """ - super(EnvironmentVersionProperties, self).__init__(description=description, properties=properties, tags=tags, auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs) + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) self.auto_rebuild = auto_rebuild self.build = build self.conda_file = conda_file @@ -12317,7 +12070,7 @@ def __init__( self.stage = stage -class EnvironmentVersionResourceArmPaginatedResult(msrest.serialization.Model): +class EnvironmentVersionResourceArmPaginatedResult(_serialization.Model): """A paginated list of EnvironmentVersion entities. :ivar next_link: The link to the next page of EnvironmentVersion objects. If null, there are no @@ -12328,17 +12081,17 @@ class EnvironmentVersionResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[EnvironmentVersion]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[EnvironmentVersion]"}, } def __init__( self, *, next_link: Optional[str] = None, - value: Optional[List["EnvironmentVersion"]] = None, - **kwargs - ): + value: Optional[List["_models.EnvironmentVersion"]] = None, + **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of EnvironmentVersion objects. If null, there are no additional pages. @@ -12346,12 +12099,12 @@ def __init__( :keyword value: An array of objects of type EnvironmentVersion. :paramtype value: list[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] """ - super(EnvironmentVersionResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class ErrorAdditionalInfo(msrest.serialization.Model): +class ErrorAdditionalInfo(_serialization.Model): """The resource management error additional info. Variables are only populated by the server, and will be ignored when sending a request. @@ -12359,31 +12112,27 @@ class ErrorAdditionalInfo(msrest.serialization.Model): :ivar type: The additional info type. :vartype type: str :ivar info: The additional info. - :vartype info: any + :vartype info: JSON """ _validation = { - 'type': {'readonly': True}, - 'info': {'readonly': True}, + "type": {"readonly": True}, + "info": {"readonly": True}, } _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'info': {'key': 'info', 'type': 'object'}, + "type": {"key": "type", "type": "str"}, + "info": {"key": "info", "type": "object"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ErrorAdditionalInfo, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.type = None self.info = None -class ErrorDetail(msrest.serialization.Model): +class ErrorDetail(_serialization.Model): """The error detail. Variables are only populated by the server, and will be ignored when sending a request. 
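# Editor's sketch (not generated code): an EnvironmentVersion assembled from the regenerated
# properties model; the image reference and tags are placeholders.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

env_version = _models.EnvironmentVersion(
    properties=_models.EnvironmentVersionProperties(
        image="myregistry.azurecr.io/envs/minimal:1",  # placeholder image name
        os_type="Linux",                               # OperatingSystemType: "Linux" or "Windows"
        auto_rebuild="OnBaseImageUpdate",
        tags={"purpose": "example"},
    )
)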
@@ -12401,28 +12150,24 @@ class ErrorDetail(msrest.serialization.Model): """ _validation = { - 'code': {'readonly': True}, - 'message': {'readonly': True}, - 'target': {'readonly': True}, - 'details': {'readonly': True}, - 'additional_info': {'readonly': True}, + "code": {"readonly": True}, + "message": {"readonly": True}, + "target": {"readonly": True}, + "details": {"readonly": True}, + "additional_info": {"readonly": True}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, - 'details': {'key': 'details', 'type': '[ErrorDetail]'}, - 'additional_info': {'key': 'additionalInfo', 'type': '[ErrorAdditionalInfo]'}, + "code": {"key": "code", "type": "str"}, + "message": {"key": "message", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "details": {"key": "details", "type": "[ErrorDetail]"}, + "additional_info": {"key": "additionalInfo", "type": "[ErrorAdditionalInfo]"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ErrorDetail, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.code = None self.message = None self.target = None @@ -12430,136 +12175,132 @@ def __init__( self.additional_info = None -class ErrorResponse(msrest.serialization.Model): - """Common error response for all Azure Resource Manager APIs to return error details for failed operations. (This also follows the OData error response format.). +class ErrorResponse(_serialization.Model): + """Common error response for all Azure Resource Manager APIs to return error details for failed + operations. (This also follows the OData error response format.). :ivar error: The error object. :vartype error: ~azure.mgmt.machinelearningservices.models.ErrorDetail """ _attribute_map = { - 'error': {'key': 'error', 'type': 'ErrorDetail'}, + "error": {"key": "error", "type": "ErrorDetail"}, } - def __init__( - self, - *, - error: Optional["ErrorDetail"] = None, - **kwargs - ): + def __init__(self, *, error: Optional["_models.ErrorDetail"] = None, **kwargs: Any) -> None: """ :keyword error: The error object. :paramtype error: ~azure.mgmt.machinelearningservices.models.ErrorDetail """ - super(ErrorResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.error = error -class EstimatedVMPrice(msrest.serialization.Model): +class EstimatedVMPrice(_serialization.Model): """The estimated price info for using a VM of a particular OS type, tier, etc. All required parameters must be populated in order to send to Azure. - :ivar retail_price: Required. The price charged for using the VM. + :ivar retail_price: The price charged for using the VM. Required. :vartype retail_price: float - :ivar os_type: Required. Operating system type used by the VM. Possible values include: - "Linux", "Windows". + :ivar os_type: Operating system type used by the VM. Required. Known values are: "Linux" and + "Windows". :vartype os_type: str or ~azure.mgmt.machinelearningservices.models.VMPriceOSType - :ivar vm_tier: Required. The type of the VM. Possible values include: "Standard", - "LowPriority", "Spot". + :ivar vm_tier: The type of the VM. Required. Known values are: "Standard", "LowPriority", and + "Spot". 
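# Editor's sketch (not generated code): ErrorDetail/ErrorResponse are populated by the
# service (all fields are read-only), so typical client-side use is inspection of a
# deserialized failure payload rather than construction.
from typing import List
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

def summarize_error(response: _models.ErrorResponse) -> str:
    err = response.error
    if err is None:
        return "unknown error"
    lines: List[str] = [f"{err.code}: {err.message}"]
    for detail in err.details or []:  # nested ErrorDetail entries; may be None
        lines.append(f"  - {detail.code}: {detail.message}")
    return "\n".join(lines)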
:vartype vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier """ _validation = { - 'retail_price': {'required': True}, - 'os_type': {'required': True}, - 'vm_tier': {'required': True}, + "retail_price": {"required": True}, + "os_type": {"required": True}, + "vm_tier": {"required": True}, } _attribute_map = { - 'retail_price': {'key': 'retailPrice', 'type': 'float'}, - 'os_type': {'key': 'osType', 'type': 'str'}, - 'vm_tier': {'key': 'vmTier', 'type': 'str'}, + "retail_price": {"key": "retailPrice", "type": "float"}, + "os_type": {"key": "osType", "type": "str"}, + "vm_tier": {"key": "vmTier", "type": "str"}, } def __init__( self, *, retail_price: float, - os_type: Union[str, "VMPriceOSType"], - vm_tier: Union[str, "VMTier"], - **kwargs - ): + os_type: Union[str, "_models.VMPriceOSType"], + vm_tier: Union[str, "_models.VMTier"], + **kwargs: Any + ) -> None: """ - :keyword retail_price: Required. The price charged for using the VM. + :keyword retail_price: The price charged for using the VM. Required. :paramtype retail_price: float - :keyword os_type: Required. Operating system type used by the VM. Possible values include: - "Linux", "Windows". + :keyword os_type: Operating system type used by the VM. Required. Known values are: "Linux" and + "Windows". :paramtype os_type: str or ~azure.mgmt.machinelearningservices.models.VMPriceOSType - :keyword vm_tier: Required. The type of the VM. Possible values include: "Standard", - "LowPriority", "Spot". + :keyword vm_tier: The type of the VM. Required. Known values are: "Standard", "LowPriority", + and "Spot". :paramtype vm_tier: str or ~azure.mgmt.machinelearningservices.models.VMTier """ - super(EstimatedVMPrice, self).__init__(**kwargs) + super().__init__(**kwargs) self.retail_price = retail_price self.os_type = os_type self.vm_tier = vm_tier -class EstimatedVMPrices(msrest.serialization.Model): +class EstimatedVMPrices(_serialization.Model): """The estimated price info for using a VM. All required parameters must be populated in order to send to Azure. - :ivar billing_currency: Required. Three lettered code specifying the currency of the VM price. - Example: USD. Possible values include: "USD". + :ivar billing_currency: Three lettered code specifying the currency of the VM price. Example: + USD. Required. "USD" :vartype billing_currency: str or ~azure.mgmt.machinelearningservices.models.BillingCurrency - :ivar unit_of_measure: Required. The unit of time measurement for the specified VM price. - Example: OneHour. Possible values include: "OneHour". + :ivar unit_of_measure: The unit of time measurement for the specified VM price. Example: + OneHour. Required. "OneHour" :vartype unit_of_measure: str or ~azure.mgmt.machinelearningservices.models.UnitOfMeasure - :ivar values: Required. The list of estimated prices for using a VM of a particular OS type, - tier, etc. + :ivar values: The list of estimated prices for using a VM of a particular OS type, tier, etc. + Required. 
:vartype values: list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice] """ _validation = { - 'billing_currency': {'required': True}, - 'unit_of_measure': {'required': True}, - 'values': {'required': True}, + "billing_currency": {"required": True}, + "unit_of_measure": {"required": True}, + "values": {"required": True}, } _attribute_map = { - 'billing_currency': {'key': 'billingCurrency', 'type': 'str'}, - 'unit_of_measure': {'key': 'unitOfMeasure', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[EstimatedVMPrice]'}, + "billing_currency": {"key": "billingCurrency", "type": "str"}, + "unit_of_measure": {"key": "unitOfMeasure", "type": "str"}, + "values": {"key": "values", "type": "[EstimatedVMPrice]"}, } def __init__( self, *, - billing_currency: Union[str, "BillingCurrency"], - unit_of_measure: Union[str, "UnitOfMeasure"], - values: List["EstimatedVMPrice"], - **kwargs - ): + billing_currency: Union[str, "_models.BillingCurrency"], + unit_of_measure: Union[str, "_models.UnitOfMeasure"], + values: List["_models.EstimatedVMPrice"], + **kwargs: Any + ) -> None: """ - :keyword billing_currency: Required. Three lettered code specifying the currency of the VM - price. Example: USD. Possible values include: "USD". + :keyword billing_currency: Three lettered code specifying the currency of the VM price. + Example: USD. Required. "USD" :paramtype billing_currency: str or ~azure.mgmt.machinelearningservices.models.BillingCurrency - :keyword unit_of_measure: Required. The unit of time measurement for the specified VM price. - Example: OneHour. Possible values include: "OneHour". + :keyword unit_of_measure: The unit of time measurement for the specified VM price. Example: + OneHour. Required. "OneHour" :paramtype unit_of_measure: str or ~azure.mgmt.machinelearningservices.models.UnitOfMeasure - :keyword values: Required. The list of estimated prices for using a VM of a particular OS type, - tier, etc. + :keyword values: The list of estimated prices for using a VM of a particular OS type, tier, + etc. Required. :paramtype values: list[~azure.mgmt.machinelearningservices.models.EstimatedVMPrice] """ - super(EstimatedVMPrices, self).__init__(**kwargs) + super().__init__(**kwargs) self.billing_currency = billing_currency self.unit_of_measure = unit_of_measure self.values = values -class ExternalFQDNResponse(msrest.serialization.Model): +class ExternalFQDNResponse(_serialization.Model): """ExternalFQDNResponse. :ivar value: @@ -12567,20 +12308,15 @@ class ExternalFQDNResponse(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[FQDNEndpointsPropertyBag]'}, + "value": {"key": "value", "type": "[FQDNEndpointsPropertyBag]"}, } - def __init__( - self, - *, - value: Optional[List["FQDNEndpointsPropertyBag"]] = None, - **kwargs - ): + def __init__(self, *, value: Optional[List["_models.FQDNEndpointsPropertyBag"]] = None, **kwargs: Any) -> None: """ :keyword value: :paramtype value: list[~azure.mgmt.machinelearningservices.models.FQDNEndpointsPropertyBag] """ - super(ExternalFQDNResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value @@ -12602,37 +12338,32 @@ class Feature(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. 
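# Editor's sketch (not generated code): the VM pricing models are read models returned by the
# service; constructing them locally only shows the regenerated required-argument shape.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

price = _models.EstimatedVMPrice(retail_price=0.52, os_type="Linux", vm_tier="LowPriority")
prices = _models.EstimatedVMPrices(billing_currency="USD", unit_of_measure="OneHour", values=[price])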
:vartype properties: ~azure.mgmt.machinelearningservices.models.FeatureProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeatureProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "FeatureProperties"}, } - def __init__( - self, - *, - properties: "FeatureProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.FeatureProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.FeatureProperties """ - super(Feature, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties @@ -12641,83 +12372,82 @@ class FeatureAttributionDriftMonitoringSignal(MonitoringSignalBase): All required parameters must be populated in order to send to Azure. - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and "Enabled". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :ivar properties: Property dictionary. Properties can be added, but not removed or altered. :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType - :ivar metric_threshold: Required. [Required] A list of metrics to calculate and their - associated thresholds. + :ivar metric_threshold: [Required] A list of metrics to calculate and their associated + thresholds. Required. :vartype metric_threshold: ~azure.mgmt.machinelearningservices.models.FeatureAttributionMetricThreshold - :ivar production_data: Required. [Required] The data which drift will be calculated for. + :ivar production_data: [Required] The data which drift will be calculated for. Required. :vartype production_data: list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] - :ivar reference_data: Required. [Required] The data to calculate drift against. + :ivar reference_data: [Required] The data to calculate drift against. Required. 
:vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ _validation = { - 'signal_type': {'required': True}, - 'metric_threshold': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, + "signal_type": {"required": True}, + "metric_threshold": {"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'metric_threshold': {'key': 'metricThreshold', 'type': 'FeatureAttributionMetricThreshold'}, - 'production_data': {'key': 'productionData', 'type': '[MonitoringInputDataBase]'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "metric_threshold": {"key": "metricThreshold", "type": "FeatureAttributionMetricThreshold"}, + "production_data": {"key": "productionData", "type": "[MonitoringInputDataBase]"}, + "reference_data": {"key": "referenceData", "type": "MonitoringInputDataBase"}, } def __init__( self, *, - metric_threshold: "FeatureAttributionMetricThreshold", - production_data: List["MonitoringInputDataBase"], - reference_data: "MonitoringInputDataBase", - mode: Optional[Union[str, "MonitoringNotificationMode"]] = None, + metric_threshold: "_models.FeatureAttributionMetricThreshold", + production_data: List["_models.MonitoringInputDataBase"], + reference_data: "_models.MonitoringInputDataBase", + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, properties: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :keyword properties: Property dictionary. Properties can be added, but not removed or altered. :paramtype properties: dict[str, str] - :keyword metric_threshold: Required. [Required] A list of metrics to calculate and their - associated thresholds. + :keyword metric_threshold: [Required] A list of metrics to calculate and their associated + thresholds. Required. :paramtype metric_threshold: ~azure.mgmt.machinelearningservices.models.FeatureAttributionMetricThreshold - :keyword production_data: Required. [Required] The data which drift will be calculated for. + :keyword production_data: [Required] The data which drift will be calculated for. Required. :paramtype production_data: list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] - :keyword reference_data: Required. [Required] The data to calculate drift against. + :keyword reference_data: [Required] The data to calculate drift against. Required. 
:paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ - super(FeatureAttributionDriftMonitoringSignal, self).__init__(mode=mode, properties=properties, **kwargs) - self.signal_type = 'FeatureAttributionDrift' # type: str + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "FeatureAttributionDrift" self.metric_threshold = metric_threshold self.production_data = production_data self.reference_data = reference_data -class FeatureAttributionMetricThreshold(msrest.serialization.Model): +class FeatureAttributionMetricThreshold(_serialization.Model): """FeatureAttributionMetricThreshold. All required parameters must be populated in order to send to Azure. - :ivar metric: Required. [Required] The feature attribution metric to calculate. Possible values - include: "NormalizedDiscountedCumulativeGain". + :ivar metric: [Required] The feature attribution metric to calculate. Required. + "NormalizedDiscountedCumulativeGain" :vartype metric: str or ~azure.mgmt.machinelearningservices.models.FeatureAttributionMetric :ivar threshold: The threshold value. If null, a default value will be set depending on the selected metric. @@ -12725,30 +12455,30 @@ class FeatureAttributionMetricThreshold(msrest.serialization.Model): """ _validation = { - 'metric': {'required': True}, + "metric": {"required": True}, } _attribute_map = { - 'metric': {'key': 'metric', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "metric": {"key": "metric", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } def __init__( self, *, - metric: Union[str, "FeatureAttributionMetric"], - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + metric: Union[str, "_models.FeatureAttributionMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: """ - :keyword metric: Required. [Required] The feature attribution metric to calculate. Possible - values include: "NormalizedDiscountedCumulativeGain". + :keyword metric: [Required] The feature attribution metric to calculate. Required. + "NormalizedDiscountedCumulativeGain" :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.FeatureAttributionMetric :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ - super(FeatureAttributionMetricThreshold, self).__init__(**kwargs) + super().__init__(**kwargs) self.metric = metric self.threshold = threshold @@ -12760,21 +12490,21 @@ class FeatureProperties(ResourceBase): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar data_type: Specifies type. Possible values include: "String", "Integer", "Long", "Float", - "Double", "Binary", "Datetime", "Boolean". + :ivar data_type: Specifies type. Known values are: "String", "Integer", "Long", "Float", + "Double", "Binary", "Datetime", and "Boolean". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.FeatureDataType :ivar feature_name: Specifies name. 
:vartype feature_name: str """ _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'feature_name': {'key': 'featureName', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "data_type": {"key": "dataType", "type": "str"}, + "feature_name": {"key": "featureName", "type": "str"}, } def __init__( @@ -12783,29 +12513,29 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - data_type: Optional[Union[str, "FeatureDataType"]] = None, + data_type: Optional[Union[str, "_models.FeatureDataType"]] = None, feature_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword data_type: Specifies type. Possible values include: "String", "Integer", "Long", - "Float", "Double", "Binary", "Datetime", "Boolean". + :keyword data_type: Specifies type. Known values are: "String", "Integer", "Long", "Float", + "Double", "Binary", "Datetime", and "Boolean". :paramtype data_type: str or ~azure.mgmt.machinelearningservices.models.FeatureDataType :keyword feature_name: Specifies name. :paramtype feature_name: str """ - super(FeatureProperties, self).__init__(description=description, properties=properties, tags=tags, **kwargs) + super().__init__(description=description, properties=properties, tags=tags, **kwargs) self.data_type = data_type self.feature_name = feature_name -class FeatureResourceArmPaginatedResult(msrest.serialization.Model): +class FeatureResourceArmPaginatedResult(_serialization.Model): """A paginated list of Feature entities. :ivar next_link: The link to the next page of Feature objects. If null, there are no additional @@ -12816,17 +12546,13 @@ class FeatureResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Feature]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Feature]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["Feature"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.Feature"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of Feature objects. If null, there are no additional pages. @@ -12834,7 +12560,7 @@ def __init__( :keyword value: An array of objects of type Feature. :paramtype value: list[~azure.mgmt.machinelearningservices.models.Feature] """ - super(FeatureResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value @@ -12857,37 +12583,32 @@ class FeaturesetContainer(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. 
:vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.FeaturesetContainerProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeaturesetContainerProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "FeaturesetContainerProperties"}, } - def __init__( - self, - *, - properties: "FeaturesetContainerProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.FeaturesetContainerProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.FeaturesetContainerProperties """ - super(FeaturesetContainer, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties @@ -12900,7 +12621,7 @@ class FeaturesetContainerProperties(AssetContainer): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar is_archived: Is the asset archived?. :vartype is_archived: bool @@ -12908,26 +12629,26 @@ class FeaturesetContainerProperties(AssetContainer): :vartype latest_version: str :ivar next_version: The next auto incremental version. :vartype next_version: str - :ivar provisioning_state: Provisioning state for the featureset container. Possible values - include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". + :ivar provisioning_state: Provisioning state for the featureset container. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". 
:vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.AssetProvisioningState """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } def __init__( @@ -12936,24 +12657,24 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - is_archived: Optional[bool] = False, - **kwargs - ): + is_archived: bool = False, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword is_archived: Is the asset archived?. :paramtype is_archived: bool """ - super(FeaturesetContainerProperties, self).__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) self.provisioning_state = None -class FeaturesetContainerResourceArmPaginatedResult(msrest.serialization.Model): +class FeaturesetContainerResourceArmPaginatedResult(_serialization.Model): """A paginated list of FeaturesetContainer entities. :ivar next_link: The link to the next page of FeaturesetContainer objects. If null, there are @@ -12964,17 +12685,17 @@ class FeaturesetContainerResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[FeaturesetContainer]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[FeaturesetContainer]"}, } def __init__( self, *, next_link: Optional[str] = None, - value: Optional[List["FeaturesetContainer"]] = None, - **kwargs - ): + value: Optional[List["_models.FeaturesetContainer"]] = None, + **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of FeaturesetContainer objects. If null, there are no additional pages. @@ -12982,12 +12703,12 @@ def __init__( :keyword value: An array of objects of type FeaturesetContainer. 
:paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] """ - super(FeaturesetContainerResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class FeaturesetSpecification(msrest.serialization.Model): +class FeaturesetSpecification(_serialization.Model): """Dto object representing specification. :ivar path: Specifies the spec path. @@ -12995,20 +12716,15 @@ class FeaturesetSpecification(msrest.serialization.Model): """ _attribute_map = { - 'path': {'key': 'path', 'type': 'str'}, + "path": {"key": "path", "type": "str"}, } - def __init__( - self, - *, - path: Optional[str] = None, - **kwargs - ): + def __init__(self, *, path: Optional[str] = None, **kwargs: Any) -> None: """ :keyword path: Specifies the spec path. :paramtype path: str """ - super(FeaturesetSpecification, self).__init__(**kwargs) + super().__init__(**kwargs) self.path = path @@ -13030,41 +12746,36 @@ class FeaturesetVersion(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeaturesetVersionProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "FeaturesetVersionProperties"}, } - def __init__( - self, - *, - properties: "FeaturesetVersionProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.FeaturesetVersionProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionProperties """ - super(FeaturesetVersion, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class FeaturesetVersionBackfillRequest(msrest.serialization.Model): +class FeaturesetVersionBackfillRequest(_serialization.Model): """Request payload for creating a backfill request for a given feature set version. :ivar data_availability_status: Specified the data availability status that you want to @@ -13085,36 +12796,36 @@ class FeaturesetVersionBackfillRequest(msrest.serialization.Model): :vartype resource: ~azure.mgmt.machinelearningservices.models.MaterializationComputeResource :ivar spark_configuration: Specifies the spark compute settings. :vartype spark_configuration: dict[str, str] - :ivar tags: A set of tags. Specifies the tags. 
+ :ivar tags: Specifies the tags. :vartype tags: dict[str, str] """ _attribute_map = { - 'data_availability_status': {'key': 'dataAvailabilityStatus', 'type': '[str]'}, - 'description': {'key': 'description', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'feature_window': {'key': 'featureWindow', 'type': 'FeatureWindow'}, - 'job_id': {'key': 'jobId', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'resource': {'key': 'resource', 'type': 'MaterializationComputeResource'}, - 'spark_configuration': {'key': 'sparkConfiguration', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + "data_availability_status": {"key": "dataAvailabilityStatus", "type": "[str]"}, + "description": {"key": "description", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "feature_window": {"key": "featureWindow", "type": "FeatureWindow"}, + "job_id": {"key": "jobId", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "resource": {"key": "resource", "type": "MaterializationComputeResource"}, + "spark_configuration": {"key": "sparkConfiguration", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, } def __init__( self, *, - data_availability_status: Optional[List[Union[str, "DataAvailabilityStatus"]]] = None, + data_availability_status: Optional[List[Union[str, "_models.DataAvailabilityStatus"]]] = None, description: Optional[str] = None, display_name: Optional[str] = None, - feature_window: Optional["FeatureWindow"] = None, + feature_window: Optional["_models.FeatureWindow"] = None, job_id: Optional[str] = None, properties: Optional[Dict[str, str]] = None, - resource: Optional["MaterializationComputeResource"] = None, + resource: Optional["_models.MaterializationComputeResource"] = None, spark_configuration: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword data_availability_status: Specified the data availability status that you want to backfill. @@ -13134,10 +12845,10 @@ def __init__( :paramtype resource: ~azure.mgmt.machinelearningservices.models.MaterializationComputeResource :keyword spark_configuration: Specifies the spark compute settings. :paramtype spark_configuration: dict[str, str] - :keyword tags: A set of tags. Specifies the tags. + :keyword tags: Specifies the tags. :paramtype tags: dict[str, str] """ - super(FeaturesetVersionBackfillRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.data_availability_status = data_availability_status self.description = description self.display_name = display_name @@ -13149,7 +12860,7 @@ def __init__( self.tags = tags -class FeaturesetVersionBackfillResponse(msrest.serialization.Model): +class FeaturesetVersionBackfillResponse(_serialization.Model): """Response payload for creating a backfill request for a given feature set version. :ivar job_ids: List of jobs submitted as part of the backfill request. @@ -13157,24 +12868,19 @@ class FeaturesetVersionBackfillResponse(msrest.serialization.Model): """ _attribute_map = { - 'job_ids': {'key': 'jobIds', 'type': '[str]'}, + "job_ids": {"key": "jobIds", "type": "[str]"}, } - def __init__( - self, - *, - job_ids: Optional[List[str]] = None, - **kwargs - ): + def __init__(self, *, job_ids: Optional[List[str]] = None, **kwargs: Any) -> None: """ :keyword job_ids: List of jobs submitted as part of the backfill request. 
:paramtype job_ids: list[str] """ - super(FeaturesetVersionBackfillResponse, self).__init__(**kwargs) + super().__init__(**kwargs) self.job_ids = job_ids -class FeaturesetVersionProperties(AssetBase): +class FeaturesetVersionProperties(AssetBase): # pylint: disable=too-many-instance-attributes """Dto object representing feature set version. Variables are only populated by the server, and will be ignored when sending a request. @@ -13183,7 +12889,7 @@ class FeaturesetVersionProperties(AssetBase): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -13198,8 +12904,8 @@ class FeaturesetVersionProperties(AssetBase): :ivar materialization_settings: Specifies the materialization settings. :vartype materialization_settings: ~azure.mgmt.machinelearningservices.models.MaterializationSettings - :ivar provisioning_state: Provisioning state for the featureset version container. Possible - values include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". + :ivar provisioning_state: Provisioning state for the featureset version container. Known values + are: "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.AssetProvisioningState :ivar specification: Specifies the feature spec details. @@ -13209,21 +12915,21 @@ class FeaturesetVersionProperties(AssetBase): """ _validation = { - 'provisioning_state': {'readonly': True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'entities': {'key': 'entities', 'type': '[str]'}, - 'materialization_settings': {'key': 'materializationSettings', 'type': 'MaterializationSettings'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'specification': {'key': 'specification', 'type': 'FeaturesetSpecification'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "entities": {"key": "entities", "type": "[str]"}, + "materialization_settings": {"key": "materializationSettings", "type": "MaterializationSettings"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "specification": {"key": "specification", "type": "FeaturesetSpecification"}, + "stage": {"key": "stage", "type": "str"}, } def __init__( @@ -13232,21 +12938,21 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - auto_delete_setting: 
Optional["AutoDeleteSetting"] = None, - is_anonymous: Optional[bool] = False, - is_archived: Optional[bool] = False, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, entities: Optional[List[str]] = None, - materialization_settings: Optional["MaterializationSettings"] = None, - specification: Optional["FeaturesetSpecification"] = None, + materialization_settings: Optional["_models.MaterializationSettings"] = None, + specification: Optional["_models.FeaturesetSpecification"] = None, stage: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -13266,7 +12972,15 @@ def __init__( :keyword stage: Specifies the asset stage. :paramtype stage: str """ - super(FeaturesetVersionProperties, self).__init__(description=description, properties=properties, tags=tags, auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs) + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) self.entities = entities self.materialization_settings = materialization_settings self.provisioning_state = None @@ -13274,7 +12988,7 @@ def __init__( self.stage = stage -class FeaturesetVersionResourceArmPaginatedResult(msrest.serialization.Model): +class FeaturesetVersionResourceArmPaginatedResult(_serialization.Model): """A paginated list of FeaturesetVersion entities. :ivar next_link: The link to the next page of FeaturesetVersion objects. If null, there are no @@ -13285,17 +12999,17 @@ class FeaturesetVersionResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[FeaturesetVersion]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[FeaturesetVersion]"}, } def __init__( self, *, next_link: Optional[str] = None, - value: Optional[List["FeaturesetVersion"]] = None, - **kwargs - ): + value: Optional[List["_models.FeaturesetVersion"]] = None, + **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of FeaturesetVersion objects. If null, there are no additional pages. @@ -13303,7 +13017,7 @@ def __init__( :keyword value: An array of objects of type FeaturesetVersion. :paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] """ - super(FeaturesetVersionResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value @@ -13326,39 +13040,34 @@ class FeaturestoreEntityContainer(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. 
+ :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainerProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeaturestoreEntityContainerProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "FeaturestoreEntityContainerProperties"}, } - def __init__( - self, - *, - properties: "FeaturestoreEntityContainerProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.FeaturestoreEntityContainerProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainerProperties """ - super(FeaturestoreEntityContainer, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties @@ -13371,7 +13080,7 @@ class FeaturestoreEntityContainerProperties(AssetContainer): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar is_archived: Is the asset archived?. :vartype is_archived: bool @@ -13379,26 +13088,26 @@ class FeaturestoreEntityContainerProperties(AssetContainer): :vartype latest_version: str :ivar next_version: The next auto incremental version. :vartype next_version: str - :ivar provisioning_state: Provisioning state for the featurestore entity container. Possible - values include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". + :ivar provisioning_state: Provisioning state for the featurestore entity container. Known + values are: "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". 
:vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.AssetProvisioningState """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } def __init__( @@ -13407,24 +13116,24 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - is_archived: Optional[bool] = False, - **kwargs - ): + is_archived: bool = False, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword is_archived: Is the asset archived?. :paramtype is_archived: bool """ - super(FeaturestoreEntityContainerProperties, self).__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) self.provisioning_state = None -class FeaturestoreEntityContainerResourceArmPaginatedResult(msrest.serialization.Model): +class FeaturestoreEntityContainerResourceArmPaginatedResult(_serialization.Model): """A paginated list of FeaturestoreEntityContainer entities. :ivar next_link: The link to the next page of FeaturestoreEntityContainer objects. If null, @@ -13435,17 +13144,17 @@ class FeaturestoreEntityContainerResourceArmPaginatedResult(msrest.serialization """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[FeaturestoreEntityContainer]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[FeaturestoreEntityContainer]"}, } def __init__( self, *, next_link: Optional[str] = None, - value: Optional[List["FeaturestoreEntityContainer"]] = None, - **kwargs - ): + value: Optional[List["_models.FeaturestoreEntityContainer"]] = None, + **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of FeaturestoreEntityContainer objects. If null, there are no additional pages. @@ -13453,7 +13162,7 @@ def __init__( :keyword value: An array of objects of type FeaturestoreEntityContainer. 
:paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] """ - super(FeaturestoreEntityContainerResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value @@ -13476,39 +13185,34 @@ class FeaturestoreEntityVersion(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersionProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'FeaturestoreEntityVersionProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "FeaturestoreEntityVersionProperties"}, } - def __init__( - self, - *, - properties: "FeaturestoreEntityVersionProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.FeaturestoreEntityVersionProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersionProperties """ - super(FeaturestoreEntityVersion, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties @@ -13521,7 +13225,7 @@ class FeaturestoreEntityVersionProperties(AssetBase): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -13533,8 +13237,8 @@ class FeaturestoreEntityVersionProperties(AssetBase): :vartype is_archived: bool :ivar index_columns: Specifies index columns. :vartype index_columns: list[~azure.mgmt.machinelearningservices.models.IndexColumn] - :ivar provisioning_state: Provisioning state for the featurestore entity version. Possible - values include: "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". + :ivar provisioning_state: Provisioning state for the featurestore entity version. Known values + are: "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.AssetProvisioningState :ivar stage: Specifies the asset stage. 
@@ -13542,19 +13246,19 @@ class FeaturestoreEntityVersionProperties(AssetBase): """ _validation = { - 'provisioning_state': {'readonly': True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'index_columns': {'key': 'indexColumns', 'type': '[IndexColumn]'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "index_columns": {"key": "indexColumns", "type": "[IndexColumn]"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "stage": {"key": "stage", "type": "str"}, } def __init__( @@ -13563,19 +13267,19 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - is_anonymous: Optional[bool] = False, - is_archived: Optional[bool] = False, - index_columns: Optional[List["IndexColumn"]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + index_columns: Optional[List["_models.IndexColumn"]] = None, stage: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -13590,13 +13294,21 @@ def __init__( :keyword stage: Specifies the asset stage. :paramtype stage: str """ - super(FeaturestoreEntityVersionProperties, self).__init__(description=description, properties=properties, tags=tags, auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs) + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) self.index_columns = index_columns self.provisioning_state = None self.stage = stage -class FeaturestoreEntityVersionResourceArmPaginatedResult(msrest.serialization.Model): +class FeaturestoreEntityVersionResourceArmPaginatedResult(_serialization.Model): """A paginated list of FeaturestoreEntityVersion entities. :ivar next_link: The link to the next page of FeaturestoreEntityVersion objects. 
If null, there @@ -13607,17 +13319,17 @@ class FeaturestoreEntityVersionResourceArmPaginatedResult(msrest.serialization.M """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[FeaturestoreEntityVersion]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[FeaturestoreEntityVersion]"}, } def __init__( self, *, next_link: Optional[str] = None, - value: Optional[List["FeaturestoreEntityVersion"]] = None, - **kwargs - ): + value: Optional[List["_models.FeaturestoreEntityVersion"]] = None, + **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of FeaturestoreEntityVersion objects. If null, there are no additional pages. @@ -13625,12 +13337,12 @@ def __init__( :keyword value: An array of objects of type FeaturestoreEntityVersion. :paramtype value: list[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] """ - super(FeaturestoreEntityVersionResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class FeatureStoreSettings(msrest.serialization.Model): +class FeatureStoreSettings(_serialization.Model): """FeatureStoreSettings. :ivar compute_runtime: @@ -13642,19 +13354,19 @@ class FeatureStoreSettings(msrest.serialization.Model): """ _attribute_map = { - 'compute_runtime': {'key': 'computeRuntime', 'type': 'ComputeRuntimeDto'}, - 'offline_store_connection_name': {'key': 'offlineStoreConnectionName', 'type': 'str'}, - 'online_store_connection_name': {'key': 'onlineStoreConnectionName', 'type': 'str'}, + "compute_runtime": {"key": "computeRuntime", "type": "ComputeRuntimeDto"}, + "offline_store_connection_name": {"key": "offlineStoreConnectionName", "type": "str"}, + "online_store_connection_name": {"key": "onlineStoreConnectionName", "type": "str"}, } def __init__( self, *, - compute_runtime: Optional["ComputeRuntimeDto"] = None, + compute_runtime: Optional["_models.ComputeRuntimeDto"] = None, offline_store_connection_name: Optional[str] = None, online_store_connection_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword compute_runtime: :paramtype compute_runtime: ~azure.mgmt.machinelearningservices.models.ComputeRuntimeDto @@ -13663,7 +13375,7 @@ def __init__( :keyword online_store_connection_name: :paramtype online_store_connection_name: str """ - super(FeatureStoreSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.compute_runtime = compute_runtime self.offline_store_connection_name = offline_store_connection_name self.online_store_connection_name = online_store_connection_name @@ -13674,41 +13386,36 @@ class FeatureSubset(MonitoringFeatureFilterBase): All required parameters must be populated in order to send to Azure. - :ivar filter_type: Required. [Required] Specifies the feature filter to leverage when selecting - features to calculate metrics over.Constant filled by server. Possible values include: - "AllFeatures", "TopNByAttribution", "FeatureSubset". + :ivar filter_type: [Required] Specifies the feature filter to leverage when selecting features + to calculate metrics over. Required. Known values are: "AllFeatures", "TopNByAttribution", and + "FeatureSubset". :vartype filter_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterType - :ivar features: Required. [Required] The list of features to include. + :ivar features: [Required] The list of features to include. Required. 
:vartype features: list[str] """ _validation = { - 'filter_type': {'required': True}, - 'features': {'required': True}, + "filter_type": {"required": True}, + "features": {"required": True}, } _attribute_map = { - 'filter_type': {'key': 'filterType', 'type': 'str'}, - 'features': {'key': 'features', 'type': '[str]'}, + "filter_type": {"key": "filterType", "type": "str"}, + "features": {"key": "features", "type": "[str]"}, } - def __init__( - self, - *, - features: List[str], - **kwargs - ): + def __init__(self, *, features: List[str], **kwargs: Any) -> None: """ - :keyword features: Required. [Required] The list of features to include. + :keyword features: [Required] The list of features to include. Required. :paramtype features: list[str] """ - super(FeatureSubset, self).__init__(**kwargs) - self.filter_type = 'FeatureSubset' # type: str + super().__init__(**kwargs) + self.filter_type: str = "FeatureSubset" self.features = features -class FeatureWindow(msrest.serialization.Model): +class FeatureWindow(_serialization.Model): """Specifies the feature window. :ivar feature_window_end: Specifies the feature window end time. @@ -13718,8 +13425,8 @@ class FeatureWindow(msrest.serialization.Model): """ _attribute_map = { - 'feature_window_end': {'key': 'featureWindowEnd', 'type': 'iso-8601'}, - 'feature_window_start': {'key': 'featureWindowStart', 'type': 'iso-8601'}, + "feature_window_end": {"key": "featureWindowEnd", "type": "iso-8601"}, + "feature_window_start": {"key": "featureWindowStart", "type": "iso-8601"}, } def __init__( @@ -13727,20 +13434,20 @@ def __init__( *, feature_window_end: Optional[datetime.datetime] = None, feature_window_start: Optional[datetime.datetime] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword feature_window_end: Specifies the feature window end time. :paramtype feature_window_end: ~datetime.datetime :keyword feature_window_start: Specifies the feature window start time. :paramtype feature_window_start: ~datetime.datetime """ - super(FeatureWindow, self).__init__(**kwargs) + super().__init__(**kwargs) self.feature_window_end = feature_window_end self.feature_window_start = feature_window_start -class FeaturizationSettings(msrest.serialization.Model): +class FeaturizationSettings(_serialization.Model): """Featurization Configuration. :ivar dataset_language: Dataset language, useful for the text data. @@ -13748,20 +13455,15 @@ class FeaturizationSettings(msrest.serialization.Model): """ _attribute_map = { - 'dataset_language': {'key': 'datasetLanguage', 'type': 'str'}, + "dataset_language": {"key": "datasetLanguage", "type": "str"}, } - def __init__( - self, - *, - dataset_language: Optional[str] = None, - **kwargs - ): + def __init__(self, *, dataset_language: Optional[str] = None, **kwargs: Any) -> None: """ :keyword dataset_language: Dataset language, useful for the text data. :paramtype dataset_language: str """ - super(FeaturizationSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.dataset_language = dataset_language @@ -13772,46 +13474,40 @@ class FileSystemSource(DataImportSource): :ivar connection: Workspace connection for data import source storage. :vartype connection: str - :ivar source_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "database", "file_system". + :ivar source_type: [Required] Specifies the type of data. Required. Known values are: + "database" and "file_system". 
:vartype source_type: str or ~azure.mgmt.machinelearningservices.models.DataImportSourceType :ivar path: Path on data import FileSystem source. :vartype path: str """ _validation = { - 'source_type': {'required': True}, + "source_type": {"required": True}, } _attribute_map = { - 'connection': {'key': 'connection', 'type': 'str'}, - 'source_type': {'key': 'sourceType', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'str'}, + "connection": {"key": "connection", "type": "str"}, + "source_type": {"key": "sourceType", "type": "str"}, + "path": {"key": "path", "type": "str"}, } - def __init__( - self, - *, - connection: Optional[str] = None, - path: Optional[str] = None, - **kwargs - ): + def __init__(self, *, connection: Optional[str] = None, path: Optional[str] = None, **kwargs: Any) -> None: """ :keyword connection: Workspace connection for data import source storage. :paramtype connection: str :keyword path: Path on data import FileSystem source. :paramtype path: str """ - super(FileSystemSource, self).__init__(connection=connection, **kwargs) - self.source_type = 'file_system' # type: str + super().__init__(connection=connection, **kwargs) + self.source_type: str = "file_system" self.path = path -class MonitoringInputDataBase(msrest.serialization.Model): +class MonitoringInputDataBase(_serialization.Model): """Monitoring input data base definition. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FixedInputData, StaticInputData, TrailingInputData. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + FixedInputData, StaticInputData, TrailingInputData All required parameters must be populated in order to send to Azure. @@ -13819,60 +13515,61 @@ class MonitoringInputDataBase(msrest.serialization.Model): :vartype columns: dict[str, str] :ivar data_context: The context metadata of the data source. :vartype data_context: str - :ivar input_data_type: Required. [Required] Specifies the type of signal to monitor.Constant - filled by server. Possible values include: "Static", "Trailing", "Fixed". + :ivar input_data_type: [Required] Specifies the type of signal to monitor. Required. Known + values are: "Static", "Trailing", and "Fixed". :vartype input_data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringInputDataType - :ivar job_input_type: Required. [Required] Specifies the type of job. Possible values include: - "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", "triton_model". + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :ivar uri: Required. [Required] Input Asset URI. + :ivar uri: [Required] Input Asset URI. Required. 
:vartype uri: str """ _validation = { - 'input_data_type': {'required': True}, - 'job_input_type': {'required': True}, - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "input_data_type": {"required": True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'columns': {'key': 'columns', 'type': '{str}'}, - 'data_context': {'key': 'dataContext', 'type': 'str'}, - 'input_data_type': {'key': 'inputDataType', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, + "columns": {"key": "columns", "type": "{str}"}, + "data_context": {"key": "dataContext", "type": "str"}, + "input_data_type": {"key": "inputDataType", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } _subtype_map = { - 'input_data_type': {'Fixed': 'FixedInputData', 'Static': 'StaticInputData', 'Trailing': 'TrailingInputData'} + "input_data_type": {"Fixed": "FixedInputData", "Static": "StaticInputData", "Trailing": "TrailingInputData"} } def __init__( self, *, - job_input_type: Union[str, "JobInputType"], + job_input_type: Union[str, "_models.JobInputType"], uri: str, columns: Optional[Dict[str, str]] = None, data_context: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword columns: Mapping of column names to special uses. :paramtype columns: dict[str, str] :keyword data_context: The context metadata of the data source. :paramtype data_context: str - :keyword job_input_type: Required. [Required] Specifies the type of job. Possible values - include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", + :keyword job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". :paramtype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :keyword uri: Required. [Required] Input Asset URI. + :keyword uri: [Required] Input Asset URI. Required. :paramtype uri: str """ - super(MonitoringInputDataBase, self).__init__(**kwargs) + super().__init__(**kwargs) self.columns = columns self.data_context = data_context - self.input_data_type = None # type: Optional[str] + self.input_data_type: Optional[str] = None self.job_input_type = job_input_type self.uri = uri @@ -13886,57 +13583,58 @@ class FixedInputData(MonitoringInputDataBase): :vartype columns: dict[str, str] :ivar data_context: The context metadata of the data source. :vartype data_context: str - :ivar input_data_type: Required. [Required] Specifies the type of signal to monitor.Constant - filled by server. Possible values include: "Static", "Trailing", "Fixed". + :ivar input_data_type: [Required] Specifies the type of signal to monitor. Required. Known + values are: "Static", "Trailing", and "Fixed". :vartype input_data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringInputDataType - :ivar job_input_type: Required. [Required] Specifies the type of job. Possible values include: - "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", "triton_model". + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". 
:vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :ivar uri: Required. [Required] Input Asset URI. + :ivar uri: [Required] Input Asset URI. Required. :vartype uri: str """ _validation = { - 'input_data_type': {'required': True}, - 'job_input_type': {'required': True}, - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "input_data_type": {"required": True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'columns': {'key': 'columns', 'type': '{str}'}, - 'data_context': {'key': 'dataContext', 'type': 'str'}, - 'input_data_type': {'key': 'inputDataType', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, + "columns": {"key": "columns", "type": "{str}"}, + "data_context": {"key": "dataContext", "type": "str"}, + "input_data_type": {"key": "inputDataType", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, - job_input_type: Union[str, "JobInputType"], + job_input_type: Union[str, "_models.JobInputType"], uri: str, columns: Optional[Dict[str, str]] = None, data_context: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword columns: Mapping of column names to special uses. :paramtype columns: dict[str, str] :keyword data_context: The context metadata of the data source. :paramtype data_context: str - :keyword job_input_type: Required. [Required] Specifies the type of job. Possible values - include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", + :keyword job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". :paramtype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :keyword uri: Required. [Required] Input Asset URI. + :keyword uri: [Required] Input Asset URI. Required. :paramtype uri: str """ - super(FixedInputData, self).__init__(columns=columns, data_context=data_context, job_input_type=job_input_type, uri=uri, **kwargs) - self.input_data_type = 'Fixed' # type: str + super().__init__(columns=columns, data_context=data_context, job_input_type=job_input_type, uri=uri, **kwargs) + self.input_data_type: str = "Fixed" -class FlavorData(msrest.serialization.Model): +class FlavorData(_serialization.Model): """FlavorData. :ivar data: Model flavor-specific data. @@ -13944,28 +13642,36 @@ class FlavorData(msrest.serialization.Model): """ _attribute_map = { - 'data': {'key': 'data', 'type': '{str}'}, + "data": {"key": "data", "type": "{str}"}, } - def __init__( - self, - *, - data: Optional[Dict[str, str]] = None, - **kwargs - ): + def __init__(self, *, data: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: """ :keyword data: Model flavor-specific data. :paramtype data: dict[str, str] """ - super(FlavorData, self).__init__(**kwargs) + super().__init__(**kwargs) self.data = data -class Forecasting(AutoMLVertical, TableVertical): +class Forecasting(TableVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes """Forecasting task in AutoML Table vertical. All required parameters must be populated in order to send to Azure. + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". 
+ :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar cv_split_column_names: Columns to use for CVSplit data. :vartype cv_split_column_names: list[str] :ivar featurization_settings: Featurization inputs needed for AutoML job. @@ -14001,23 +13707,10 @@ class Forecasting(AutoMLVertical, TableVertical): :ivar weight_column_name: The name of the sample weight column. Automated ML supports a weighted column as an input, causing rows in the data to be weighted up or down. :vartype weight_column_name: str - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar forecasting_settings: Forecasting task specific inputs. :vartype forecasting_settings: ~azure.mgmt.machinelearningservices.models.ForecastingSettings - :ivar primary_metric: Primary metric for forecasting task. Possible values include: - "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", + :ivar primary_metric: Primary metric for forecasting task. Known values are: + "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", and "NormalizedMeanAbsoluteError". 
:vartype primary_metric: str or ~azure.mgmt.machinelearningservices.models.ForecastingPrimaryMetrics @@ -14027,56 +13720,64 @@ class Forecasting(AutoMLVertical, TableVertical): """ _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'TableFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, - 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, - 'search_space': {'key': 'searchSpace', 'type': '[TableParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'TableSweepSettings'}, - 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, - 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'forecasting_settings': {'key': 'forecastingSettings', 'type': 'ForecastingSettings'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - 'training_settings': {'key': 'trainingSettings', 'type': 'ForecastingTrainingSettings'}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, + "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, + "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, + "test_data": {"key": "testData", "type": "MLTableJobInput"}, + "test_data_size": {"key": "testDataSize", "type": "float"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "weight_column_name": {"key": "weightColumnName", "type": "str"}, + "forecasting_settings": {"key": "forecastingSettings", "type": "ForecastingSettings"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + "training_settings": {"key": "trainingSettings", "type": "ForecastingTrainingSettings"}, } def __init__( self, *, - training_data: "MLTableJobInput", + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, cv_split_column_names: Optional[List[str]] = None, - featurization_settings: Optional["TableVerticalFeaturizationSettings"] = None, 
- fixed_parameters: Optional["TableFixedParameters"] = None, - limit_settings: Optional["TableVerticalLimitSettings"] = None, - n_cross_validations: Optional["NCrossValidations"] = None, - search_space: Optional[List["TableParameterSubspace"]] = None, - sweep_settings: Optional["TableSweepSettings"] = None, - test_data: Optional["MLTableJobInput"] = None, + featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.TableFixedParameters"] = None, + limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, + n_cross_validations: Optional["_models.NCrossValidations"] = None, + search_space: Optional[List["_models.TableParameterSubspace"]] = None, + sweep_settings: Optional["_models.TableSweepSettings"] = None, + test_data: Optional["_models.MLTableJobInput"] = None, test_data_size: Optional[float] = None, - validation_data: Optional["MLTableJobInput"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, validation_data_size: Optional[float] = None, weight_column_name: Optional[str] = None, - log_verbosity: Optional[Union[str, "LogVerbosity"]] = None, - target_column_name: Optional[str] = None, - forecasting_settings: Optional["ForecastingSettings"] = None, - primary_metric: Optional[Union[str, "ForecastingPrimaryMetrics"]] = None, - training_settings: Optional["ForecastingTrainingSettings"] = None, - **kwargs - ): - """ + forecasting_settings: Optional["_models.ForecastingSettings"] = None, + primary_metric: Optional[Union[str, "_models.ForecastingPrimaryMetrics"]] = None, + training_settings: Optional["_models.ForecastingTrainingSettings"] = None, + **kwargs: Any + ) -> None: + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :keyword cv_split_column_names: Columns to use for CVSplit data. :paramtype cv_split_column_names: list[str] :keyword featurization_settings: Featurization inputs needed for AutoML job. @@ -14115,18 +13816,10 @@ def __init__( :keyword weight_column_name: The name of the sample weight column. Automated ML supports a weighted column as an input, causing rows in the data to be weighted up or down. :paramtype weight_column_name: str - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :keyword forecasting_settings: Forecasting task specific inputs. :paramtype forecasting_settings: ~azure.mgmt.machinelearningservices.models.ForecastingSettings - :keyword primary_metric: Primary metric for forecasting task. 
Possible values include: - "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", + :keyword primary_metric: Primary metric for forecasting task. Known values are: + "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", and "NormalizedMeanAbsoluteError". :paramtype primary_metric: str or ~azure.mgmt.machinelearningservices.models.ForecastingPrimaryMetrics @@ -14134,7 +13827,31 @@ def __init__( :paramtype training_settings: ~azure.mgmt.machinelearningservices.models.ForecastingTrainingSettings """ - super(Forecasting, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, cv_split_column_names=cv_split_column_names, featurization_settings=featurization_settings, fixed_parameters=fixed_parameters, limit_settings=limit_settings, n_cross_validations=n_cross_validations, search_space=search_space, sweep_settings=sweep_settings, test_data=test_data, test_data_size=test_data_size, validation_data=validation_data, validation_data_size=validation_data_size, weight_column_name=weight_column_name, **kwargs) + super().__init__( + cv_split_column_names=cv_split_column_names, + featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, + limit_settings=limit_settings, + n_cross_validations=n_cross_validations, + search_space=search_space, + sweep_settings=sweep_settings, + test_data=test_data, + test_data_size=test_data_size, + validation_data=validation_data, + validation_data_size=validation_data_size, + weight_column_name=weight_column_name, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type: str = "Forecasting" + self.training_data = training_data + self.forecasting_settings = forecasting_settings + self.primary_metric = primary_metric + self.training_settings = training_settings self.cv_split_column_names = cv_split_column_names self.featurization_settings = featurization_settings self.fixed_parameters = fixed_parameters @@ -14147,16 +13864,9 @@ def __init__( self.validation_data = validation_data self.validation_data_size = validation_data_size self.weight_column_name = weight_column_name - self.task_type = 'Forecasting' # type: str - self.forecasting_settings = forecasting_settings - self.primary_metric = primary_metric - self.training_settings = training_settings - self.log_verbosity = log_verbosity - self.target_column_name = target_column_name - self.training_data = training_data -class ForecastingSettings(msrest.serialization.Model): +class ForecastingSettings(_serialization.Model): # pylint: disable=too-many-instance-attributes """Forecasting specific parameters. :ivar country_or_region_for_holidays: Country or region for holidays for forecasting tasks. @@ -14168,7 +13878,7 @@ class ForecastingSettings(msrest.serialization.Model): three days apart. :vartype cv_step_size: int :ivar feature_lags: Flag for generating lags for the numeric features with 'auto' or null. - Possible values include: "None", "Auto". + Known values are: "None" and "Auto". :vartype feature_lags: str or ~azure.mgmt.machinelearningservices.models.FeatureLags :ivar features_unknown_at_forecast_time: The feature columns that are available for training but unknown at the time of forecast/inference. @@ -14185,14 +13895,14 @@ class ForecastingSettings(msrest.serialization.Model): If seasonality is set to 'auto', it will be inferred. 
:vartype seasonality: ~azure.mgmt.machinelearningservices.models.Seasonality :ivar short_series_handling_config: The parameter defining how if AutoML should handle short - time series. Possible values include: "None", "Auto", "Pad", "Drop". + time series. Known values are: "None", "Auto", "Pad", and "Drop". :vartype short_series_handling_config: str or ~azure.mgmt.machinelearningservices.models.ShortSeriesHandlingConfiguration :ivar target_aggregate_function: The function to be used to aggregate the time series target column to conform to a user specified frequency. If the TargetAggregateFunction is set i.e. not 'None', but the freq parameter is not set, the error is raised. The possible target aggregation functions are: "sum", "max", "min" and "mean". - Possible values include: "None", "Sum", "Max", "Min", "Mean". + Known values are: "None", "Sum", "Max", "Min", and "Mean". :vartype target_aggregate_function: str or ~azure.mgmt.machinelearningservices.models.TargetAggregationFunction :ivar target_lags: The number of past periods to lag from the target column. @@ -14210,26 +13920,26 @@ class ForecastingSettings(msrest.serialization.Model): If grain is not defined, the data set is assumed to be one time-series. This parameter is used with task type forecasting. :vartype time_series_id_column_names: list[str] - :ivar use_stl: Configure STL Decomposition of the time-series target column. Possible values - include: "None", "Season", "SeasonTrend". + :ivar use_stl: Configure STL Decomposition of the time-series target column. Known values are: + "None", "Season", and "SeasonTrend". :vartype use_stl: str or ~azure.mgmt.machinelearningservices.models.UseStl """ _attribute_map = { - 'country_or_region_for_holidays': {'key': 'countryOrRegionForHolidays', 'type': 'str'}, - 'cv_step_size': {'key': 'cvStepSize', 'type': 'int'}, - 'feature_lags': {'key': 'featureLags', 'type': 'str'}, - 'features_unknown_at_forecast_time': {'key': 'featuresUnknownAtForecastTime', 'type': '[str]'}, - 'forecast_horizon': {'key': 'forecastHorizon', 'type': 'ForecastHorizon'}, - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'seasonality': {'key': 'seasonality', 'type': 'Seasonality'}, - 'short_series_handling_config': {'key': 'shortSeriesHandlingConfig', 'type': 'str'}, - 'target_aggregate_function': {'key': 'targetAggregateFunction', 'type': 'str'}, - 'target_lags': {'key': 'targetLags', 'type': 'TargetLags'}, - 'target_rolling_window_size': {'key': 'targetRollingWindowSize', 'type': 'TargetRollingWindowSize'}, - 'time_column_name': {'key': 'timeColumnName', 'type': 'str'}, - 'time_series_id_column_names': {'key': 'timeSeriesIdColumnNames', 'type': '[str]'}, - 'use_stl': {'key': 'useStl', 'type': 'str'}, + "country_or_region_for_holidays": {"key": "countryOrRegionForHolidays", "type": "str"}, + "cv_step_size": {"key": "cvStepSize", "type": "int"}, + "feature_lags": {"key": "featureLags", "type": "str"}, + "features_unknown_at_forecast_time": {"key": "featuresUnknownAtForecastTime", "type": "[str]"}, + "forecast_horizon": {"key": "forecastHorizon", "type": "ForecastHorizon"}, + "frequency": {"key": "frequency", "type": "str"}, + "seasonality": {"key": "seasonality", "type": "Seasonality"}, + "short_series_handling_config": {"key": "shortSeriesHandlingConfig", "type": "str"}, + "target_aggregate_function": {"key": "targetAggregateFunction", "type": "str"}, + "target_lags": {"key": "targetLags", "type": "TargetLags"}, + "target_rolling_window_size": {"key": "targetRollingWindowSize", "type": "TargetRollingWindowSize"}, 
+ "time_column_name": {"key": "timeColumnName", "type": "str"}, + "time_series_id_column_names": {"key": "timeSeriesIdColumnNames", "type": "[str]"}, + "use_stl": {"key": "useStl", "type": "str"}, } def __init__( @@ -14237,20 +13947,20 @@ def __init__( *, country_or_region_for_holidays: Optional[str] = None, cv_step_size: Optional[int] = None, - feature_lags: Optional[Union[str, "FeatureLags"]] = None, + feature_lags: Optional[Union[str, "_models.FeatureLags"]] = None, features_unknown_at_forecast_time: Optional[List[str]] = None, - forecast_horizon: Optional["ForecastHorizon"] = None, + forecast_horizon: Optional["_models.ForecastHorizon"] = None, frequency: Optional[str] = None, - seasonality: Optional["Seasonality"] = None, - short_series_handling_config: Optional[Union[str, "ShortSeriesHandlingConfiguration"]] = None, - target_aggregate_function: Optional[Union[str, "TargetAggregationFunction"]] = None, - target_lags: Optional["TargetLags"] = None, - target_rolling_window_size: Optional["TargetRollingWindowSize"] = None, + seasonality: Optional["_models.Seasonality"] = None, + short_series_handling_config: Optional[Union[str, "_models.ShortSeriesHandlingConfiguration"]] = None, + target_aggregate_function: Optional[Union[str, "_models.TargetAggregationFunction"]] = None, + target_lags: Optional["_models.TargetLags"] = None, + target_rolling_window_size: Optional["_models.TargetRollingWindowSize"] = None, time_column_name: Optional[str] = None, time_series_id_column_names: Optional[List[str]] = None, - use_stl: Optional[Union[str, "UseStl"]] = None, - **kwargs - ): + use_stl: Optional[Union[str, "_models.UseStl"]] = None, + **kwargs: Any + ) -> None: """ :keyword country_or_region_for_holidays: Country or region for holidays for forecasting tasks. These should be ISO 3166 two-letter country/region codes, for example 'US' or 'GB'. @@ -14261,7 +13971,7 @@ def __init__( three days apart. :paramtype cv_step_size: int :keyword feature_lags: Flag for generating lags for the numeric features with 'auto' or null. - Possible values include: "None", "Auto". + Known values are: "None" and "Auto". :paramtype feature_lags: str or ~azure.mgmt.machinelearningservices.models.FeatureLags :keyword features_unknown_at_forecast_time: The feature columns that are available for training but unknown at the time of forecast/inference. @@ -14280,14 +13990,14 @@ def __init__( If seasonality is set to 'auto', it will be inferred. :paramtype seasonality: ~azure.mgmt.machinelearningservices.models.Seasonality :keyword short_series_handling_config: The parameter defining how if AutoML should handle short - time series. Possible values include: "None", "Auto", "Pad", "Drop". + time series. Known values are: "None", "Auto", "Pad", and "Drop". :paramtype short_series_handling_config: str or ~azure.mgmt.machinelearningservices.models.ShortSeriesHandlingConfiguration :keyword target_aggregate_function: The function to be used to aggregate the time series target column to conform to a user specified frequency. If the TargetAggregateFunction is set i.e. not 'None', but the freq parameter is not set, the error is raised. The possible target aggregation functions are: "sum", "max", "min" and "mean". - Possible values include: "None", "Sum", "Max", "Min", "Mean". + Known values are: "None", "Sum", "Max", "Min", and "Mean". :paramtype target_aggregate_function: str or ~azure.mgmt.machinelearningservices.models.TargetAggregationFunction :keyword target_lags: The number of past periods to lag from the target column. 
@@ -14305,11 +14015,11 @@ def __init__( If grain is not defined, the data set is assumed to be one time-series. This parameter is used with task type forecasting. :paramtype time_series_id_column_names: list[str] - :keyword use_stl: Configure STL Decomposition of the time-series target column. Possible values - include: "None", "Season", "SeasonTrend". + :keyword use_stl: Configure STL Decomposition of the time-series target column. Known values + are: "None", "Season", and "SeasonTrend". :paramtype use_stl: str or ~azure.mgmt.machinelearningservices.models.UseStl """ - super(ForecastingSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.country_or_region_for_holidays = country_or_region_for_holidays self.cv_step_size = cv_step_size self.feature_lags = feature_lags @@ -14351,8 +14061,8 @@ class ForecastingTrainingSettings(TrainingSettings): mode selection. Default is 'auto'. If 'Distributed' then only distributed featurization is used and distributed algorithms are chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". :vartype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode :ivar allowed_training_algorithms: Allowed models for forecasting task. :vartype allowed_training_algorithms: list[str or @@ -14363,33 +14073,33 @@ class ForecastingTrainingSettings(TrainingSettings): """ _attribute_map = { - 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, - 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, - 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, - 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, - 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, - 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, - 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, - 'training_mode': {'key': 'trainingMode', 'type': 'str'}, - 'allowed_training_algorithms': {'key': 'allowedTrainingAlgorithms', 'type': '[str]'}, - 'blocked_training_algorithms': {'key': 'blockedTrainingAlgorithms', 'type': '[str]'}, + "enable_dnn_training": {"key": "enableDnnTraining", "type": "bool"}, + "enable_model_explainability": {"key": "enableModelExplainability", "type": "bool"}, + "enable_onnx_compatible_models": {"key": "enableOnnxCompatibleModels", "type": "bool"}, + "enable_stack_ensemble": {"key": "enableStackEnsemble", "type": "bool"}, + "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, + "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, + "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "training_mode": {"key": "trainingMode", "type": "str"}, + "allowed_training_algorithms": {"key": "allowedTrainingAlgorithms", "type": "[str]"}, + "blocked_training_algorithms": {"key": "blockedTrainingAlgorithms", "type": "[str]"}, } def __init__( self, *, - enable_dnn_training: Optional[bool] = False, - enable_model_explainability: Optional[bool] = True, - enable_onnx_compatible_models: Optional[bool] = False, - enable_stack_ensemble: Optional[bool] = True, - enable_vote_ensemble: Optional[bool] = True, - ensemble_model_download_timeout: 
Optional[datetime.timedelta] = "PT5M", - stack_ensemble_settings: Optional["StackEnsembleSettings"] = None, - training_mode: Optional[Union[str, "TrainingMode"]] = None, - allowed_training_algorithms: Optional[List[Union[str, "ForecastingModels"]]] = None, - blocked_training_algorithms: Optional[List[Union[str, "ForecastingModels"]]] = None, - **kwargs - ): + enable_dnn_training: bool = False, + enable_model_explainability: bool = True, + enable_onnx_compatible_models: bool = False, + enable_stack_ensemble: bool = True, + enable_vote_ensemble: bool = True, + ensemble_model_download_timeout: datetime.timedelta = "PT5M", + stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + training_mode: Optional[Union[str, "_models.TrainingMode"]] = None, + allowed_training_algorithms: Optional[List[Union[str, "_models.ForecastingModels"]]] = None, + blocked_training_algorithms: Optional[List[Union[str, "_models.ForecastingModels"]]] = None, + **kwargs: Any + ) -> None: """ :keyword enable_dnn_training: Enable recommendation of DNN models. :paramtype enable_dnn_training: bool @@ -14413,8 +14123,8 @@ def __init__( mode selection. Default is 'auto'. If 'Distributed' then only distributed featurization is used and distributed algorithms are chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". :paramtype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode :keyword allowed_training_algorithms: Allowed models for forecasting task. :paramtype allowed_training_algorithms: list[str or @@ -14423,12 +14133,22 @@ def __init__( :paramtype blocked_training_algorithms: list[str or ~azure.mgmt.machinelearningservices.models.ForecastingModels] """ - super(ForecastingTrainingSettings, self).__init__(enable_dnn_training=enable_dnn_training, enable_model_explainability=enable_model_explainability, enable_onnx_compatible_models=enable_onnx_compatible_models, enable_stack_ensemble=enable_stack_ensemble, enable_vote_ensemble=enable_vote_ensemble, ensemble_model_download_timeout=ensemble_model_download_timeout, stack_ensemble_settings=stack_ensemble_settings, training_mode=training_mode, **kwargs) + super().__init__( + enable_dnn_training=enable_dnn_training, + enable_model_explainability=enable_model_explainability, + enable_onnx_compatible_models=enable_onnx_compatible_models, + enable_stack_ensemble=enable_stack_ensemble, + enable_vote_ensemble=enable_vote_ensemble, + ensemble_model_download_timeout=ensemble_model_download_timeout, + stack_ensemble_settings=stack_ensemble_settings, + training_mode=training_mode, + **kwargs + ) self.allowed_training_algorithms = allowed_training_algorithms self.blocked_training_algorithms = blocked_training_algorithms -class FQDNEndpoint(msrest.serialization.Model): +class FQDNEndpoint(_serialization.Model): """FQDNEndpoint. 
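The three forecasting-related models regenerated above (`Forecasting`, `ForecastingSettings`, `ForecastingTrainingSettings`) are typically wired together into one task definition. The following is a minimal sketch under the assumption that the regenerated models are importable from the vendored preview client; the URIs, column names, horizon subtype and algorithm choices are illustrative only.

    # Sketch only: composing the regenerated forecasting task models.
    from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

    forecasting_task = _models.Forecasting(
        training_data=_models.MLTableJobInput(
            uri="azureml://datastores/workspaceblobstore/paths/train/"  # placeholder
        ),
        target_column_name="demand",
        log_verbosity="Info",
        forecasting_settings=_models.ForecastingSettings(
            time_column_name="timestamp",
            forecast_horizon=_models.AutoForecastHorizon(),  # ForecastHorizon subtype; class name assumed
            short_series_handling_config="Auto",
        ),
        primary_metric="NormalizedRootMeanSquaredError",
        training_settings=_models.ForecastingTrainingSettings(
            enable_dnn_training=False,
            blocked_training_algorithms=["ExtremeRandomTrees"],  # value assumed from ForecastingModels
        ),
    )
    # The AutoMLVertical discriminator is pinned by the constructor:
    assert forecasting_task.task_type == "Forecasting"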
:ivar domain_name: @@ -14438,17 +14158,17 @@ class FQDNEndpoint(msrest.serialization.Model): """ _attribute_map = { - 'domain_name': {'key': 'domainName', 'type': 'str'}, - 'endpoint_details': {'key': 'endpointDetails', 'type': '[FQDNEndpointDetail]'}, + "domain_name": {"key": "domainName", "type": "str"}, + "endpoint_details": {"key": "endpointDetails", "type": "[FQDNEndpointDetail]"}, } def __init__( self, *, domain_name: Optional[str] = None, - endpoint_details: Optional[List["FQDNEndpointDetail"]] = None, - **kwargs - ): + endpoint_details: Optional[List["_models.FQDNEndpointDetail"]] = None, + **kwargs: Any + ) -> None: """ :keyword domain_name: :paramtype domain_name: str @@ -14456,12 +14176,12 @@ def __init__( :paramtype endpoint_details: list[~azure.mgmt.machinelearningservices.models.FQDNEndpointDetail] """ - super(FQDNEndpoint, self).__init__(**kwargs) + super().__init__(**kwargs) self.domain_name = domain_name self.endpoint_details = endpoint_details -class FQDNEndpointDetail(msrest.serialization.Model): +class FQDNEndpointDetail(_serialization.Model): """FQDNEndpointDetail. :ivar port: @@ -14469,24 +14189,19 @@ class FQDNEndpointDetail(msrest.serialization.Model): """ _attribute_map = { - 'port': {'key': 'port', 'type': 'int'}, + "port": {"key": "port", "type": "int"}, } - def __init__( - self, - *, - port: Optional[int] = None, - **kwargs - ): + def __init__(self, *, port: Optional[int] = None, **kwargs: Any) -> None: """ :keyword port: :paramtype port: int """ - super(FQDNEndpointDetail, self).__init__(**kwargs) + super().__init__(**kwargs) self.port = port -class FQDNEndpoints(msrest.serialization.Model): +class FQDNEndpoints(_serialization.Model): """FQDNEndpoints. :ivar category: @@ -14496,29 +14211,25 @@ class FQDNEndpoints(msrest.serialization.Model): """ _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'endpoints': {'key': 'endpoints', 'type': '[FQDNEndpoint]'}, + "category": {"key": "category", "type": "str"}, + "endpoints": {"key": "endpoints", "type": "[FQDNEndpoint]"}, } def __init__( - self, - *, - category: Optional[str] = None, - endpoints: Optional[List["FQDNEndpoint"]] = None, - **kwargs - ): + self, *, category: Optional[str] = None, endpoints: Optional[List["_models.FQDNEndpoint"]] = None, **kwargs: Any + ) -> None: """ :keyword category: :paramtype category: str :keyword endpoints: :paramtype endpoints: list[~azure.mgmt.machinelearningservices.models.FQDNEndpoint] """ - super(FQDNEndpoints, self).__init__(**kwargs) + super().__init__(**kwargs) self.category = category self.endpoints = endpoints -class FQDNEndpointsPropertyBag(msrest.serialization.Model): +class FQDNEndpointsPropertyBag(_serialization.Model): """Property bag for FQDN endpoints result. 
:ivar properties: @@ -14526,76 +14237,74 @@ class FQDNEndpointsPropertyBag(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'FQDNEndpoints'}, + "properties": {"key": "properties", "type": "FQDNEndpoints"}, } - def __init__( - self, - *, - properties: Optional["FQDNEndpoints"] = None, - **kwargs - ): + def __init__(self, *, properties: Optional["_models.FQDNEndpoints"] = None, **kwargs: Any) -> None: """ :keyword properties: :paramtype properties: ~azure.mgmt.machinelearningservices.models.FQDNEndpoints """ - super(FQDNEndpointsPropertyBag, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class OutboundRule(msrest.serialization.Model): - """Outbound Rule for the managed network of a machine learning workspace. +class OutboundRule(_serialization.Model): + """Outbound rule for the managed network of a machine learning workspace. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: FqdnOutboundRule, PrivateEndpointOutboundRule, ServiceTagOutboundRule. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + FqdnOutboundRule, PrivateEndpointOutboundRule, ServiceTagOutboundRule All required parameters must be populated in order to send to Azure. - :ivar category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". + :ivar category: Category of a managed network outbound rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". :vartype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :ivar status: Type of a managed network Outbound Rule of a machine learning workspace. Possible - values include: "Inactive", "Active". + :ivar status: Type of a managed network outbound rule of a machine learning workspace. Known + values are: "Inactive" and "Active". :vartype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus - :ivar type: Required. Type of a managed network Outbound Rule of a machine learning - workspace.Constant filled by server. Possible values include: "FQDN", "PrivateEndpoint", - "ServiceTag". + :ivar type: Type of a managed network outbound rule of a machine learning workspace. Required. + Known values are: "FQDN", "PrivateEndpoint", and "ServiceTag". 
:vartype type: str or ~azure.mgmt.machinelearningservices.models.RuleType """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, + "category": {"key": "category", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "type": {"key": "type", "type": "str"}, } _subtype_map = { - 'type': {'FQDN': 'FqdnOutboundRule', 'PrivateEndpoint': 'PrivateEndpointOutboundRule', 'ServiceTag': 'ServiceTagOutboundRule'} + "type": { + "FQDN": "FqdnOutboundRule", + "PrivateEndpoint": "PrivateEndpointOutboundRule", + "ServiceTag": "ServiceTagOutboundRule", + } } def __init__( self, *, - category: Optional[Union[str, "RuleCategory"]] = None, - status: Optional[Union[str, "RuleStatus"]] = None, - **kwargs - ): + category: Optional[Union[str, "_models.RuleCategory"]] = None, + status: Optional[Union[str, "_models.RuleStatus"]] = None, + **kwargs: Any + ) -> None: """ - :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". + :keyword category: Category of a managed network outbound rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :keyword status: Type of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Inactive", "Active". + :keyword status: Type of a managed network outbound rule of a machine learning workspace. Known + values are: "Inactive" and "Active". :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus """ - super(OutboundRule, self).__init__(**kwargs) + super().__init__(**kwargs) self.category = category self.status = status - self.type = None # type: Optional[str] + self.type: Optional[str] = None class FqdnOutboundRule(OutboundRule): @@ -14603,66 +14312,64 @@ class FqdnOutboundRule(OutboundRule): All required parameters must be populated in order to send to Azure. - :ivar category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". + :ivar category: Category of a managed network outbound rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". :vartype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :ivar status: Type of a managed network Outbound Rule of a machine learning workspace. Possible - values include: "Inactive", "Active". + :ivar status: Type of a managed network outbound rule of a machine learning workspace. Known + values are: "Inactive" and "Active". :vartype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus - :ivar type: Required. Type of a managed network Outbound Rule of a machine learning - workspace.Constant filled by server. Possible values include: "FQDN", "PrivateEndpoint", - "ServiceTag". + :ivar type: Type of a managed network outbound rule of a machine learning workspace. Required. + Known values are: "FQDN", "PrivateEndpoint", and "ServiceTag". 
:vartype type: str or ~azure.mgmt.machinelearningservices.models.RuleType :ivar destination: :vartype destination: str """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'destination': {'key': 'destination', 'type': 'str'}, + "category": {"key": "category", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "destination": {"key": "destination", "type": "str"}, } def __init__( self, *, - category: Optional[Union[str, "RuleCategory"]] = None, - status: Optional[Union[str, "RuleStatus"]] = None, + category: Optional[Union[str, "_models.RuleCategory"]] = None, + status: Optional[Union[str, "_models.RuleStatus"]] = None, destination: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". + :keyword category: Category of a managed network outbound rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :keyword status: Type of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Inactive", "Active". + :keyword status: Type of a managed network outbound rule of a machine learning workspace. Known + values are: "Inactive" and "Active". :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus :keyword destination: :paramtype destination: str """ - super(FqdnOutboundRule, self).__init__(category=category, status=status, **kwargs) - self.type = 'FQDN' # type: str + super().__init__(category=category, status=status, **kwargs) + self.type: str = "FQDN" self.destination = destination -class GenerationSafetyQualityMetricThreshold(msrest.serialization.Model): +class GenerationSafetyQualityMetricThreshold(_serialization.Model): """Generation safety quality metric threshold definition. All required parameters must be populated in order to send to Azure. - :ivar metric: Required. [Required] Gets or sets the feature attribution metric to calculate. - Possible values include: "AcceptableGroundednessScorePerInstance", - "AggregatedGroundednessPassRate", "AcceptableCoherenceScorePerInstance", - "AggregatedCoherencePassRate", "AcceptableFluencyScorePerInstance", - "AggregatedFluencyPassRate", "AcceptableSimilarityScorePerInstance", - "AggregatedSimilarityPassRate", "AcceptableRelevanceScorePerInstance", - "AggregatedRelevancePassRate". + :ivar metric: [Required] Gets or sets the feature attribution metric to calculate. Required. + Known values are: "AcceptableGroundednessScorePerInstance", "AggregatedGroundednessPassRate", + "AcceptableCoherenceScorePerInstance", "AggregatedCoherencePassRate", + "AcceptableFluencyScorePerInstance", "AggregatedFluencyPassRate", + "AcceptableSimilarityScorePerInstance", "AggregatedSimilarityPassRate", + "AcceptableRelevanceScorePerInstance", and "AggregatedRelevancePassRate". :vartype metric: str or ~azure.mgmt.machinelearningservices.models.GenerationSafetyQualityMetric :ivar threshold: Gets or sets the threshold value. 
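As a usage note for the outbound-rule hierarchy regenerated above, the discriminator declared in `OutboundRule._subtype_map` is what routes deserialization to `FqdnOutboundRule`, `PrivateEndpointOutboundRule` or `ServiceTagOutboundRule`. A minimal sketch of declaring an FQDN rule follows; the import path and the destination host are assumptions.

    # Sketch only: a user-defined FQDN outbound rule for a managed network.
    from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

    pypi_rule = _models.FqdnOutboundRule(
        category="UserDefined",   # RuleCategory: "Required", "Recommended" or "UserDefined"
        destination="pypi.org",   # placeholder destination
    )
    # The subclass pins the discriminator consulted by OutboundRule._subtype_map:
    assert pypi_rule.type == "FQDN"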
@@ -14671,36 +14378,35 @@ class GenerationSafetyQualityMetricThreshold(msrest.serialization.Model): """ _validation = { - 'metric': {'required': True}, + "metric": {"required": True}, } _attribute_map = { - 'metric': {'key': 'metric', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "metric": {"key": "metric", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } def __init__( self, *, - metric: Union[str, "GenerationSafetyQualityMetric"], - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + metric: Union[str, "_models.GenerationSafetyQualityMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: """ - :keyword metric: Required. [Required] Gets or sets the feature attribution metric to calculate. - Possible values include: "AcceptableGroundednessScorePerInstance", - "AggregatedGroundednessPassRate", "AcceptableCoherenceScorePerInstance", - "AggregatedCoherencePassRate", "AcceptableFluencyScorePerInstance", - "AggregatedFluencyPassRate", "AcceptableSimilarityScorePerInstance", - "AggregatedSimilarityPassRate", "AcceptableRelevanceScorePerInstance", - "AggregatedRelevancePassRate". + :keyword metric: [Required] Gets or sets the feature attribution metric to calculate. Required. + Known values are: "AcceptableGroundednessScorePerInstance", "AggregatedGroundednessPassRate", + "AcceptableCoherenceScorePerInstance", "AggregatedCoherencePassRate", + "AcceptableFluencyScorePerInstance", "AggregatedFluencyPassRate", + "AcceptableSimilarityScorePerInstance", "AggregatedSimilarityPassRate", + "AcceptableRelevanceScorePerInstance", and "AggregatedRelevancePassRate". :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.GenerationSafetyQualityMetric :keyword threshold: Gets or sets the threshold value. If null, a default value will be set depending on the selected metric. :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ - super(GenerationSafetyQualityMetricThreshold, self).__init__(**kwargs) + super().__init__(**kwargs) self.metric = metric self.threshold = threshold @@ -14710,25 +14416,24 @@ class GenerationSafetyQualityMonitoringSignal(MonitoringSignalBase): All required parameters must be populated in order to send to Azure. - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and "Enabled". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :ivar properties: Property dictionary. Properties can be added, but not removed or altered. :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType - :ivar metric_thresholds: Required. [Required] Gets or sets the metrics to calculate and the - corresponding thresholds. 
+ :ivar metric_thresholds: [Required] Gets or sets the metrics to calculate and the corresponding + thresholds. Required. :vartype metric_thresholds: list[~azure.mgmt.machinelearningservices.models.GenerationSafetyQualityMetricThreshold] :ivar production_data: Gets or sets the target data for computing metrics. :vartype production_data: list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] - :ivar sampling_rate: Required. [Required] The sample rate of the target data, should be greater - than 0 and at most 1. + :ivar sampling_rate: [Required] The sample rate of the target data, should be greater than 0 + and at most 1. Required. :vartype sampling_rate: float :ivar workspace_connection_id: Gets or sets the workspace connection ID used to connect to the content generation endpoint. @@ -14736,67 +14441,67 @@ class GenerationSafetyQualityMonitoringSignal(MonitoringSignalBase): """ _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'sampling_rate': {'required': True}, + "signal_type": {"required": True}, + "metric_thresholds": {"required": True}, + "sampling_rate": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[GenerationSafetyQualityMetricThreshold]'}, - 'production_data': {'key': 'productionData', 'type': '[MonitoringInputDataBase]'}, - 'sampling_rate': {'key': 'samplingRate', 'type': 'float'}, - 'workspace_connection_id': {'key': 'workspaceConnectionId', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[GenerationSafetyQualityMetricThreshold]"}, + "production_data": {"key": "productionData", "type": "[MonitoringInputDataBase]"}, + "sampling_rate": {"key": "samplingRate", "type": "float"}, + "workspace_connection_id": {"key": "workspaceConnectionId", "type": "str"}, } def __init__( self, *, - metric_thresholds: List["GenerationSafetyQualityMetricThreshold"], + metric_thresholds: List["_models.GenerationSafetyQualityMetricThreshold"], sampling_rate: float, - mode: Optional[Union[str, "MonitoringNotificationMode"]] = None, + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, properties: Optional[Dict[str, str]] = None, - production_data: Optional[List["MonitoringInputDataBase"]] = None, + production_data: Optional[List["_models.MonitoringInputDataBase"]] = None, workspace_connection_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :keyword properties: Property dictionary. Properties can be added, but not removed or altered. :paramtype properties: dict[str, str] - :keyword metric_thresholds: Required. [Required] Gets or sets the metrics to calculate and the - corresponding thresholds. + :keyword metric_thresholds: [Required] Gets or sets the metrics to calculate and the + corresponding thresholds. Required. 
:paramtype metric_thresholds: list[~azure.mgmt.machinelearningservices.models.GenerationSafetyQualityMetricThreshold] :keyword production_data: Gets or sets the target data for computing metrics. :paramtype production_data: list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] - :keyword sampling_rate: Required. [Required] The sample rate of the target data, should be - greater than 0 and at most 1. + :keyword sampling_rate: [Required] The sample rate of the target data, should be greater than 0 + and at most 1. Required. :paramtype sampling_rate: float :keyword workspace_connection_id: Gets or sets the workspace connection ID used to connect to the content generation endpoint. :paramtype workspace_connection_id: str """ - super(GenerationSafetyQualityMonitoringSignal, self).__init__(mode=mode, properties=properties, **kwargs) - self.signal_type = 'GenerationSafetyQuality' # type: str + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "GenerationSafetyQuality" self.metric_thresholds = metric_thresholds self.production_data = production_data self.sampling_rate = sampling_rate self.workspace_connection_id = workspace_connection_id -class GenerationTokenStatisticsMetricThreshold(msrest.serialization.Model): +class GenerationTokenStatisticsMetricThreshold(_serialization.Model): """Generation token statistics metric threshold definition. All required parameters must be populated in order to send to Azure. - :ivar metric: Required. [Required] Gets or sets the feature attribution metric to calculate. - Possible values include: "TotalTokenCount", "TotalTokenCountPerGroup". + :ivar metric: [Required] Gets or sets the feature attribution metric to calculate. Required. + Known values are: "TotalTokenCount" and "TotalTokenCountPerGroup". :vartype metric: str or ~azure.mgmt.machinelearningservices.models.GenerationTokenStatisticsMetric :ivar threshold: Gets or sets the threshold value. @@ -14805,31 +14510,31 @@ class GenerationTokenStatisticsMetricThreshold(msrest.serialization.Model): """ _validation = { - 'metric': {'required': True}, + "metric": {"required": True}, } _attribute_map = { - 'metric': {'key': 'metric', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, + "metric": {"key": "metric", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, } def __init__( self, *, - metric: Union[str, "GenerationTokenStatisticsMetric"], - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + metric: Union[str, "_models.GenerationTokenStatisticsMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: """ - :keyword metric: Required. [Required] Gets or sets the feature attribution metric to calculate. - Possible values include: "TotalTokenCount", "TotalTokenCountPerGroup". + :keyword metric: [Required] Gets or sets the feature attribution metric to calculate. Required. + Known values are: "TotalTokenCount" and "TotalTokenCountPerGroup". :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.GenerationTokenStatisticsMetric :keyword threshold: Gets or sets the threshold value. If null, a default value will be set depending on the selected metric. 
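To show how the safety/quality signal and its metric-threshold model fit together after regeneration, a minimal sketch follows. The import path, the connection id and the numeric threshold are assumptions; in particular the `value` field on `MonitoringThreshold` is assumed rather than shown in this hunk.

    # Sketch only: a generation safety/quality monitoring signal.
    from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

    gsq_signal = _models.GenerationSafetyQualityMonitoringSignal(
        metric_thresholds=[
            _models.GenerationSafetyQualityMetricThreshold(
                metric="AggregatedGroundednessPassRate",
                threshold=_models.MonitoringThreshold(value=0.8),  # field name assumed; omit to use the service default
            )
        ],
        sampling_rate=0.1,  # must be greater than 0 and at most 1
        workspace_connection_id="/subscriptions/.../connections/aoai-connection",  # placeholder
        mode="Enabled",
    )
    assert gsq_signal.signal_type == "GenerationSafetyQuality"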
:paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold """ - super(GenerationTokenStatisticsMetricThreshold, self).__init__(**kwargs) + super().__init__(**kwargs) self.metric = metric self.threshold = threshold @@ -14839,70 +14544,69 @@ class GenerationTokenStatisticsSignal(MonitoringSignalBase): All required parameters must be populated in order to send to Azure. - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and "Enabled". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :ivar properties: Property dictionary. Properties can be added, but not removed or altered. :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType - :ivar metric_thresholds: Required. [Required] Gets or sets the metrics to calculate and the - corresponding thresholds. + :ivar metric_thresholds: [Required] Gets or sets the metrics to calculate and the corresponding + thresholds. Required. :vartype metric_thresholds: list[~azure.mgmt.machinelearningservices.models.GenerationTokenStatisticsMetricThreshold] :ivar production_data: Gets or sets the target data for computing metrics. :vartype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :ivar sampling_rate: Required. [Required] The sample rate of the target data, should be greater - than 0 and at most 1. + :ivar sampling_rate: [Required] The sample rate of the target data, should be greater than 0 + and at most 1. Required. 
:vartype sampling_rate: float """ _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'sampling_rate': {'required': True}, + "signal_type": {"required": True}, + "metric_thresholds": {"required": True}, + "sampling_rate": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[GenerationTokenStatisticsMetricThreshold]'}, - 'production_data': {'key': 'productionData', 'type': 'MonitoringInputDataBase'}, - 'sampling_rate': {'key': 'samplingRate', 'type': 'float'}, + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[GenerationTokenStatisticsMetricThreshold]"}, + "production_data": {"key": "productionData", "type": "MonitoringInputDataBase"}, + "sampling_rate": {"key": "samplingRate", "type": "float"}, } def __init__( self, *, - metric_thresholds: List["GenerationTokenStatisticsMetricThreshold"], + metric_thresholds: List["_models.GenerationTokenStatisticsMetricThreshold"], sampling_rate: float, - mode: Optional[Union[str, "MonitoringNotificationMode"]] = None, + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, properties: Optional[Dict[str, str]] = None, - production_data: Optional["MonitoringInputDataBase"] = None, - **kwargs - ): + production_data: Optional["_models.MonitoringInputDataBase"] = None, + **kwargs: Any + ) -> None: """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :keyword properties: Property dictionary. Properties can be added, but not removed or altered. :paramtype properties: dict[str, str] - :keyword metric_thresholds: Required. [Required] Gets or sets the metrics to calculate and the - corresponding thresholds. + :keyword metric_thresholds: [Required] Gets or sets the metrics to calculate and the + corresponding thresholds. Required. :paramtype metric_thresholds: list[~azure.mgmt.machinelearningservices.models.GenerationTokenStatisticsMetricThreshold] :keyword production_data: Gets or sets the target data for computing metrics. :paramtype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :keyword sampling_rate: Required. [Required] The sample rate of the target data, should be - greater than 0 and at most 1. + :keyword sampling_rate: [Required] The sample rate of the target data, should be greater than 0 + and at most 1. Required. :paramtype sampling_rate: float """ - super(GenerationTokenStatisticsSignal, self).__init__(mode=mode, properties=properties, **kwargs) - self.signal_type = 'GenerationTokenStatistics' # type: str + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "GenerationTokenStatistics" self.metric_thresholds = metric_thresholds self.production_data = production_data self.sampling_rate = sampling_rate @@ -14913,29 +14617,25 @@ class GridSamplingAlgorithm(SamplingAlgorithm): All required parameters must be populated in order to send to Azure. - :ivar sampling_algorithm_type: Required. 
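The token-statistics signal differs from the safety/quality signal above in that `production_data` is a single `MonitoringInputDataBase` rather than a list. A minimal sketch, reusing the `FixedInputData` subtype; the import path and URI are placeholders.

    # Sketch only: a generation token statistics signal with a single production input.
    from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

    token_signal = _models.GenerationTokenStatisticsSignal(
        metric_thresholds=[
            _models.GenerationTokenStatisticsMetricThreshold(metric="TotalTokenCount")
        ],
        sampling_rate=1.0,
        production_data=_models.FixedInputData(
            job_input_type="uri_folder",
            uri="azureml://datastores/workspaceblobstore/paths/generation_logs/",  # placeholder
        ),
    )
    assert token_signal.signal_type == "GenerationTokenStatistics"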
[Required] The algorithm used for generating - hyperparameter values, along with configuration properties.Constant filled by server. Possible - values include: "Grid", "Random", "Bayesian". + :ivar sampling_algorithm_type: [Required] The algorithm used for generating hyperparameter + values, along with configuration properties. Required. Known values are: "Grid", "Random", and + "Bayesian". :vartype sampling_algorithm_type: str or ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ _validation = { - 'sampling_algorithm_type': {'required': True}, + "sampling_algorithm_type": {"required": True}, } _attribute_map = { - 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, + "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(GridSamplingAlgorithm, self).__init__(**kwargs) - self.sampling_algorithm_type = 'Grid' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.sampling_algorithm_type: str = "Grid" class HdfsDatastore(DatastoreProperties): @@ -14949,13 +14649,12 @@ class HdfsDatastore(DatastoreProperties): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar credentials: Required. [Required] Account credentials. + :ivar credentials: [Required] Account credentials. Required. :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled - by server. Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", - "AzureFile", "Hdfs", "OneLake". + :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", "Hdfs", and "OneLake". :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType :ivar intellectual_property: Intellectual Property details. :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty @@ -14965,53 +14664,53 @@ class HdfsDatastore(DatastoreProperties): :ivar hdfs_server_certificate: The TLS cert of the HDFS server. Needs to be a base64 encoded string. Required if "Https" protocol is selected. :vartype hdfs_server_certificate: str - :ivar name_node_address: Required. [Required] IP Address or DNS HostName. + :ivar name_node_address: [Required] IP Address or DNS HostName. Required. :vartype name_node_address: str :ivar protocol: Protocol used to communicate with the storage account (Https/Http). 
:vartype protocol: str """ _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'name_node_address': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "name_node_address": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'hdfs_server_certificate': {'key': 'hdfsServerCertificate', 'type': 'str'}, - 'name_node_address': {'key': 'nameNodeAddress', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "hdfs_server_certificate": {"key": "hdfsServerCertificate", "type": "str"}, + "name_node_address": {"key": "nameNodeAddress", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, } def __init__( self, *, - credentials: "DatastoreCredentials", + credentials: "_models.DatastoreCredentials", name_node_address: str, description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - intellectual_property: Optional["IntellectualProperty"] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, hdfs_server_certificate: Optional[str] = None, - protocol: Optional[str] = "http", - **kwargs - ): + protocol: str = "http", + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword credentials: Required. [Required] Account credentials. + :keyword credentials: [Required] Account credentials. Required. :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials :keyword intellectual_property: Intellectual Property details. :paramtype intellectual_property: @@ -15019,19 +14718,26 @@ def __init__( :keyword hdfs_server_certificate: The TLS cert of the HDFS server. Needs to be a base64 encoded string. Required if "Https" protocol is selected. :paramtype hdfs_server_certificate: str - :keyword name_node_address: Required. [Required] IP Address or DNS HostName. + :keyword name_node_address: [Required] IP Address or DNS HostName. Required. :paramtype name_node_address: str :keyword protocol: Protocol used to communicate with the storage account (Https/Http). 
:paramtype protocol: str """ - super(HdfsDatastore, self).__init__(description=description, properties=properties, tags=tags, credentials=credentials, intellectual_property=intellectual_property, **kwargs) - self.datastore_type = 'Hdfs' # type: str + super().__init__( + description=description, + properties=properties, + tags=tags, + credentials=credentials, + intellectual_property=intellectual_property, + **kwargs + ) + self.datastore_type: str = "Hdfs" self.hdfs_server_certificate = hdfs_server_certificate self.name_node_address = name_node_address self.protocol = protocol -class HDInsightSchema(msrest.serialization.Model): +class HDInsightSchema(_serialization.Model): """HDInsightSchema. :ivar properties: HDInsight compute properties. @@ -15039,24 +14745,19 @@ class HDInsightSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'HDInsightProperties'}, + "properties": {"key": "properties", "type": "HDInsightProperties"}, } - def __init__( - self, - *, - properties: Optional["HDInsightProperties"] = None, - **kwargs - ): + def __init__(self, *, properties: Optional["_models.HDInsightProperties"] = None, **kwargs: Any) -> None: """ :keyword properties: HDInsight compute properties. :paramtype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties """ - super(HDInsightSchema, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class HDInsight(Compute, HDInsightSchema): +class HDInsight(Compute, HDInsightSchema): # pylint: disable=too-many-instance-attributes """A HDInsight compute. Variables are only populated by the server, and will be ignored when sending a request. @@ -15065,15 +14766,15 @@ class HDInsight(Compute, HDInsightSchema): :ivar properties: HDInsight compute properties. :vartype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType :ivar compute_location: Location for the underlying compute. :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ProvisioningState :ivar description: The description of the Machine Learning compute. 
@@ -15095,38 +14796,38 @@ class HDInsight(Compute, HDInsightSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'HDInsightProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "HDInsightProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } def __init__( self, *, - properties: Optional["HDInsightProperties"] = None, + properties: Optional["_models.HDInsightProperties"] = None, compute_location: Optional[str] = None, description: Optional[str] = None, resource_id: Optional[str] = None, disable_local_auth: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword properties: HDInsight compute properties. :paramtype properties: ~azure.mgmt.machinelearningservices.models.HDInsightProperties @@ -15140,9 +14841,16 @@ def __init__( MSI and AAD exclusively for authentication. :paramtype disable_local_auth: bool """ - super(HDInsight, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) + super().__init__( + compute_location=compute_location, + description=description, + resource_id=resource_id, + disable_local_auth=disable_local_auth, + properties=properties, + **kwargs + ) self.properties = properties - self.compute_type = 'HDInsight' # type: str + self.compute_type: str = "HDInsight" self.compute_location = compute_location self.provisioning_state = None self.description = description @@ -15154,7 +14862,7 @@ def __init__( self.disable_local_auth = disable_local_auth -class HDInsightProperties(msrest.serialization.Model): +class HDInsightProperties(_serialization.Model): """HDInsight compute properties. :ivar ssh_port: Port open for ssh connections on the master node of the cluster. 
@@ -15167,9 +14875,9 @@ class HDInsightProperties(msrest.serialization.Model): """ _attribute_map = { - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'address': {'key': 'address', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + "ssh_port": {"key": "sshPort", "type": "int"}, + "address": {"key": "address", "type": "str"}, + "administrator_account": {"key": "administratorAccount", "type": "VirtualMachineSshCredentials"}, } def __init__( @@ -15177,9 +14885,9 @@ def __init__( *, ssh_port: Optional[int] = None, address: Optional[str] = None, - administrator_account: Optional["VirtualMachineSshCredentials"] = None, - **kwargs - ): + administrator_account: Optional["_models.VirtualMachineSshCredentials"] = None, + **kwargs: Any + ) -> None: """ :keyword ssh_port: Port open for ssh connections on the master node of the cluster. :paramtype ssh_port: int @@ -15189,7 +14897,7 @@ def __init__( :paramtype administrator_account: ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials """ - super(HDInsightProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.ssh_port = ssh_port self.address = address self.administrator_account = administrator_account @@ -15200,39 +14908,34 @@ class IdAssetReference(AssetReferenceBase): All required parameters must be populated in order to send to Azure. - :ivar reference_type: Required. [Required] Specifies the type of asset reference.Constant - filled by server. Possible values include: "Id", "DataPath", "OutputPath". + :ivar reference_type: [Required] Specifies the type of asset reference. Required. Known values + are: "Id", "DataPath", and "OutputPath". :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType - :ivar asset_id: Required. [Required] ARM resource ID of the asset. + :ivar asset_id: [Required] ARM resource ID of the asset. Required. :vartype asset_id: str """ _validation = { - 'reference_type': {'required': True}, - 'asset_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "reference_type": {"required": True}, + "asset_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'reference_type': {'key': 'referenceType', 'type': 'str'}, - 'asset_id': {'key': 'assetId', 'type': 'str'}, + "reference_type": {"key": "referenceType", "type": "str"}, + "asset_id": {"key": "assetId", "type": "str"}, } - def __init__( - self, - *, - asset_id: str, - **kwargs - ): + def __init__(self, *, asset_id: str, **kwargs: Any) -> None: """ - :keyword asset_id: Required. [Required] ARM resource ID of the asset. + :keyword asset_id: [Required] ARM resource ID of the asset. Required. :paramtype asset_id: str """ - super(IdAssetReference, self).__init__(**kwargs) - self.reference_type = 'Id' # type: str + super().__init__(**kwargs) + self.reference_type: str = "Id" self.asset_id = asset_id -class IdentityForCmk(msrest.serialization.Model): +class IdentityForCmk(_serialization.Model): """Identity object used for encryption. 
:ivar user_assigned_identity: UserAssignedIdentity to be used to fetch the encryption key from @@ -15241,25 +14944,20 @@ class IdentityForCmk(msrest.serialization.Model): """ _attribute_map = { - 'user_assigned_identity': {'key': 'userAssignedIdentity', 'type': 'str'}, + "user_assigned_identity": {"key": "userAssignedIdentity", "type": "str"}, } - def __init__( - self, - *, - user_assigned_identity: Optional[str] = None, - **kwargs - ): + def __init__(self, *, user_assigned_identity: Optional[str] = None, **kwargs: Any) -> None: """ :keyword user_assigned_identity: UserAssignedIdentity to be used to fetch the encryption key from keyVault. :paramtype user_assigned_identity: str """ - super(IdentityForCmk, self).__init__(**kwargs) + super().__init__(**kwargs) self.user_assigned_identity = user_assigned_identity -class IdleShutdownSetting(msrest.serialization.Model): +class IdleShutdownSetting(_serialization.Model): """Stops compute instance after user defined period of inactivity. :ivar idle_time_before_shutdown: Time is defined in ISO8601 format. Minimum is 15 min, maximum @@ -15268,74 +14966,70 @@ class IdleShutdownSetting(msrest.serialization.Model): """ _attribute_map = { - 'idle_time_before_shutdown': {'key': 'idleTimeBeforeShutdown', 'type': 'str'}, + "idle_time_before_shutdown": {"key": "idleTimeBeforeShutdown", "type": "str"}, } - def __init__( - self, - *, - idle_time_before_shutdown: Optional[str] = None, - **kwargs - ): + def __init__(self, *, idle_time_before_shutdown: Optional[str] = None, **kwargs: Any) -> None: """ :keyword idle_time_before_shutdown: Time is defined in ISO8601 format. Minimum is 15 min, maximum is 3 days. :paramtype idle_time_before_shutdown: str """ - super(IdleShutdownSetting, self).__init__(**kwargs) + super().__init__(**kwargs) self.idle_time_before_shutdown = idle_time_before_shutdown -class Image(msrest.serialization.Model): +class Image(_serialization.Model): """Image. :ivar additional_properties: Unmatched properties from the message are deserialized to this collection. :vartype additional_properties: dict[str, any] :ivar type: Type of the image. Possible values are: docker - For docker images. azureml - For - AzureML images. Possible values include: "docker", "azureml". Default value: "docker". + AzureML images. Known values are: "docker" and "azureml". :vartype type: str or ~azure.mgmt.machinelearningservices.models.ImageType :ivar reference: Image reference URL. :vartype reference: str """ _attribute_map = { - 'additional_properties': {'key': '', 'type': '{object}'}, - 'type': {'key': 'type', 'type': 'str'}, - 'reference': {'key': 'reference', 'type': 'str'}, + "additional_properties": {"key": "", "type": "{object}"}, + "type": {"key": "type", "type": "str"}, + "reference": {"key": "reference", "type": "str"}, } def __init__( self, *, additional_properties: Optional[Dict[str, Any]] = None, - type: Optional[Union[str, "ImageType"]] = "docker", + type: Union[str, "_models.ImageType"] = "docker", reference: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword additional_properties: Unmatched properties from the message are deserialized to this collection. :paramtype additional_properties: dict[str, any] :keyword type: Type of the image. Possible values are: docker - For docker images. azureml - - For AzureML images. Possible values include: "docker", "azureml". Default value: "docker". + For AzureML images. Known values are: "docker" and "azureml". 
:paramtype type: str or ~azure.mgmt.machinelearningservices.models.ImageType :keyword reference: Image reference URL. :paramtype reference: str """ - super(Image, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_properties = additional_properties self.type = type self.reference = reference -class ImageVertical(msrest.serialization.Model): +class ImageVertical(_serialization.Model): """Abstract class for AutoML tasks that train image (computer vision) models - -such as Image Classification / Image Classification Multilabel / Image Object Detection / Image Instance Segmentation. + such as Image Classification / Image Classification Multilabel / Image Object Detection / Image + Instance Segmentation. All required parameters must be populated in order to send to Azure. - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings @@ -15349,27 +15043,27 @@ class ImageVertical(msrest.serialization.Model): """ _validation = { - 'limit_settings': {'required': True}, + "limit_settings": {"required": True}, } _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, } def __init__( self, *, - limit_settings: "ImageLimitSettings", - sweep_settings: Optional["ImageSweepSettings"] = None, - validation_data: Optional["MLTableJobInput"] = None, + limit_settings: "_models.ImageLimitSettings", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, validation_data_size: Optional[float] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings @@ -15381,7 +15075,7 @@ def __init__( Applied when validation dataset is not provided. :paramtype validation_data_size: float """ - super(ImageVertical, self).__init__(**kwargs) + super().__init__(**kwargs) self.limit_settings = limit_settings self.sweep_settings = sweep_settings self.validation_data = validation_data @@ -15393,7 +15087,7 @@ class ImageClassificationBase(ImageVertical): All required parameters must be populated in order to send to Azure. - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. 
:vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings @@ -15414,31 +15108,31 @@ class ImageClassificationBase(ImageVertical): """ _validation = { - 'limit_settings': {'required': True}, + "limit_settings": {"required": True}, } _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsClassification'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsClassification]'}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, } def __init__( self, *, - limit_settings: "ImageLimitSettings", - sweep_settings: Optional["ImageSweepSettings"] = None, - validation_data: Optional["MLTableJobInput"] = None, + limit_settings: "_models.ImageLimitSettings", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, validation_data_size: Optional[float] = None, - model_settings: Optional["ImageModelSettingsClassification"] = None, - search_space: Optional[List["ImageModelDistributionSettingsClassification"]] = None, - **kwargs - ): + model_settings: Optional["_models.ImageModelSettingsClassification"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, + **kwargs: Any + ) -> None: """ - :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings @@ -15457,18 +15151,39 @@ def __init__( :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] """ - super(ImageClassificationBase, self).__init__(limit_settings=limit_settings, sweep_settings=sweep_settings, validation_data=validation_data, validation_data_size=validation_data_size, **kwargs) + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + **kwargs + ) self.model_settings = model_settings self.search_space = search_space -class ImageClassification(AutoMLVertical, ImageClassificationBase): - """Image Classification. Multi-class image classification is used when an image is classified with only a single label -from a set of classes - e.g. each image is classified as either an image of a 'cat' or a 'dog' or a 'duck'. 
+class ImageClassification(ImageClassificationBase, AutoMLVertical): # pylint: disable=too-many-instance-attributes + """Image Classification. Multi-class image classification is used when an image is classified with + only a single label + from a set of classes - e.g. each image is classified as either an image of a 'cat' or a 'dog' + or a 'duck'. All required parameters must be populated in order to send to Azure. - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings @@ -15486,63 +15201,58 @@ class ImageClassification(AutoMLVertical, ImageClassificationBase): hyperparameters. :vartype search_space: list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric to optimize for this task. Possible values include: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + :ivar primary_metric: Primary metric to optimize for this task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and "PrecisionScoreWeighted". 
:vartype primary_metric: str or ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics """ _validation = { - 'limit_settings': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, + "limit_settings": {"required": True}, } _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsClassification'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsClassification]'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } def __init__( self, *, - limit_settings: "ImageLimitSettings", - training_data: "MLTableJobInput", - sweep_settings: Optional["ImageSweepSettings"] = None, - validation_data: Optional["MLTableJobInput"] = None, - validation_data_size: Optional[float] = None, - model_settings: Optional["ImageModelSettingsClassification"] = None, - search_space: Optional[List["ImageModelDistributionSettingsClassification"]] = None, - log_verbosity: Optional[Union[str, "LogVerbosity"]] = None, + training_data: "_models.MLTableJobInput", + limit_settings: "_models.ImageLimitSettings", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, target_column_name: Optional[str] = None, - primary_metric: Optional[Union[str, "ClassificationPrimaryMetrics"]] = None, - **kwargs - ): - """ - :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsClassification"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, + primary_metric: Optional[Union[str, "_models.ClassificationPrimaryMetrics"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". 
+ :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings @@ -15560,41 +15270,60 @@ def __init__( hyperparameters. :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword primary_metric: Primary metric to optimize for this task. Possible values include: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + :keyword primary_metric: Primary metric to optimize for this task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and "PrecisionScoreWeighted". :paramtype primary_metric: str or ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics """ - super(ImageClassification, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, limit_settings=limit_settings, sweep_settings=sweep_settings, validation_data=validation_data, validation_data_size=validation_data_size, model_settings=model_settings, search_space=search_space, **kwargs) + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + model_settings=model_settings, + search_space=search_space, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type: str = "ImageClassification" + self.training_data = training_data + self.primary_metric = primary_metric self.limit_settings = limit_settings self.sweep_settings = sweep_settings self.validation_data = validation_data self.validation_data_size = validation_data_size self.model_settings = model_settings self.search_space = search_space - self.task_type = 'ImageClassification' # type: str - self.primary_metric = primary_metric - self.log_verbosity = log_verbosity - self.target_column_name = target_column_name - self.training_data = training_data -class ImageClassificationMultilabel(AutoMLVertical, ImageClassificationBase): - """Image Classification Multilabel. 
Multi-label image classification is used when an image could have one or more labels -from a set of labels - e.g. an image could be labeled with both 'cat' and 'dog'. +class ImageClassificationMultilabel( + ImageClassificationBase, AutoMLVertical +): # pylint: disable=too-many-instance-attributes + """Image Classification Multilabel. Multi-label image classification is used when an image could + have one or more labels + from a set of labels - e.g. an image could be labeled with both 'cat' and 'dog'. All required parameters must be populated in order to send to Azure. - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings @@ -15612,63 +15341,58 @@ class ImageClassificationMultilabel(AutoMLVertical, ImageClassificationBase): hyperparameters. :vartype search_space: list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric to optimize for this task. Possible values include: + :ivar primary_metric: Primary metric to optimize for this task. Known values are: "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted", "IOU". + "PrecisionScoreWeighted", and "IOU". 
:vartype primary_metric: str or ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics """ _validation = { - 'limit_settings': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, + "limit_settings": {"required": True}, } _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsClassification'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsClassification]'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsClassification"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsClassification]"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } def __init__( self, *, - limit_settings: "ImageLimitSettings", - training_data: "MLTableJobInput", - sweep_settings: Optional["ImageSweepSettings"] = None, - validation_data: Optional["MLTableJobInput"] = None, - validation_data_size: Optional[float] = None, - model_settings: Optional["ImageModelSettingsClassification"] = None, - search_space: Optional[List["ImageModelDistributionSettingsClassification"]] = None, - log_verbosity: Optional[Union[str, "LogVerbosity"]] = None, + training_data: "_models.MLTableJobInput", + limit_settings: "_models.ImageLimitSettings", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, target_column_name: Optional[str] = None, - primary_metric: Optional[Union[str, "ClassificationMultilabelPrimaryMetrics"]] = None, - **kwargs - ): - """ - :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsClassification"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsClassification"]] = None, + primary_metric: Optional[Union[str, "_models.ClassificationMultilabelPrimaryMetrics"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". 
+ :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings @@ -15686,32 +15410,35 @@ def __init__( hyperparameters. :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsClassification] - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword primary_metric: Primary metric to optimize for this task. Possible values include: + :keyword primary_metric: Primary metric to optimize for this task. Known values are: "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", - "PrecisionScoreWeighted", "IOU". + "PrecisionScoreWeighted", and "IOU". :paramtype primary_metric: str or ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics """ - super(ImageClassificationMultilabel, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, limit_settings=limit_settings, sweep_settings=sweep_settings, validation_data=validation_data, validation_data_size=validation_data_size, model_settings=model_settings, search_space=search_space, **kwargs) + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + model_settings=model_settings, + search_space=search_space, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type: str = "ImageClassificationMultilabel" + self.training_data = training_data + self.primary_metric = primary_metric self.limit_settings = limit_settings self.sweep_settings = sweep_settings self.validation_data = validation_data self.validation_data_size = validation_data_size self.model_settings = model_settings self.search_space = search_space - self.task_type = 'ImageClassificationMultilabel' # type: str - self.primary_metric = primary_metric - self.log_verbosity = log_verbosity - self.target_column_name = target_column_name - self.training_data = training_data class ImageObjectDetectionBase(ImageVertical): @@ -15719,7 +15446,7 @@ class ImageObjectDetectionBase(ImageVertical): All required parameters must be populated in order to send to Azure. 
- :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings @@ -15740,31 +15467,31 @@ class ImageObjectDetectionBase(ImageVertical): """ _validation = { - 'limit_settings': {'required': True}, + "limit_settings": {"required": True}, } _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsObjectDetection'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsObjectDetection]'}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, } def __init__( self, *, - limit_settings: "ImageLimitSettings", - sweep_settings: Optional["ImageSweepSettings"] = None, - validation_data: Optional["MLTableJobInput"] = None, + limit_settings: "_models.ImageLimitSettings", + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, validation_data_size: Optional[float] = None, - model_settings: Optional["ImageModelSettingsObjectDetection"] = None, - search_space: Optional[List["ImageModelDistributionSettingsObjectDetection"]] = None, - **kwargs - ): + model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, + **kwargs: Any + ) -> None: """ - :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings @@ -15783,18 +15510,40 @@ def __init__( :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] """ - super(ImageObjectDetectionBase, self).__init__(limit_settings=limit_settings, sweep_settings=sweep_settings, validation_data=validation_data, validation_data_size=validation_data_size, **kwargs) + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + **kwargs + ) self.model_settings = model_settings self.search_space = search_space -class ImageInstanceSegmentation(AutoMLVertical, ImageObjectDetectionBase): - """Image Instance Segmentation. 
Instance segmentation is used to identify objects in an image at the pixel level, -drawing a polygon around each object in the image. +class ImageInstanceSegmentation( + ImageObjectDetectionBase, AutoMLVertical +): # pylint: disable=too-many-instance-attributes + """Image Instance Segmentation. Instance segmentation is used to identify objects in an image at + the pixel level, + drawing a polygon around each object in the image. All required parameters must be populated in order to send to Azure. - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings @@ -15812,62 +15561,56 @@ class ImageInstanceSegmentation(AutoMLVertical, ImageObjectDetectionBase): hyperparameters. :vartype search_space: list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric to optimize for this task. Possible values include: - "MeanAveragePrecision". + :ivar primary_metric: Primary metric to optimize for this task. 
"MeanAveragePrecision" :vartype primary_metric: str or ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics """ _validation = { - 'limit_settings': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, + "limit_settings": {"required": True}, } _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsObjectDetection'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsObjectDetection]'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } def __init__( self, *, - limit_settings: "ImageLimitSettings", - training_data: "MLTableJobInput", - sweep_settings: Optional["ImageSweepSettings"] = None, - validation_data: Optional["MLTableJobInput"] = None, - validation_data_size: Optional[float] = None, - model_settings: Optional["ImageModelSettingsObjectDetection"] = None, - search_space: Optional[List["ImageModelDistributionSettingsObjectDetection"]] = None, - log_verbosity: Optional[Union[str, "LogVerbosity"]] = None, + training_data: "_models.MLTableJobInput", + limit_settings: "_models.ImageLimitSettings", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, target_column_name: Optional[str] = None, - primary_metric: Optional[Union[str, "InstanceSegmentationPrimaryMetrics"]] = None, - **kwargs - ): - """ - :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, + primary_metric: Optional[Union[str, "_models.InstanceSegmentationPrimaryMetrics"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". 
+ :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings @@ -15885,34 +15628,36 @@ def __init__( hyperparameters. :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword primary_metric: Primary metric to optimize for this task. Possible values include: - "MeanAveragePrecision". + :keyword primary_metric: Primary metric to optimize for this task. "MeanAveragePrecision" :paramtype primary_metric: str or ~azure.mgmt.machinelearningservices.models.InstanceSegmentationPrimaryMetrics """ - super(ImageInstanceSegmentation, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, limit_settings=limit_settings, sweep_settings=sweep_settings, validation_data=validation_data, validation_data_size=validation_data_size, model_settings=model_settings, search_space=search_space, **kwargs) + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + model_settings=model_settings, + search_space=search_space, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type: str = "ImageInstanceSegmentation" + self.training_data = training_data + self.primary_metric = primary_metric self.limit_settings = limit_settings self.sweep_settings = sweep_settings self.validation_data = validation_data self.validation_data_size = validation_data_size self.model_settings = model_settings self.search_space = search_space - self.task_type = 'ImageInstanceSegmentation' # type: str - self.primary_metric = primary_metric - self.log_verbosity = log_verbosity - self.target_column_name = target_column_name - self.training_data = training_data -class ImageLimitSettings(msrest.serialization.Model): +class ImageLimitSettings(_serialization.Model): """Limit settings for the AutoML job. :ivar max_concurrent_trials: Maximum number of concurrent AutoML iterations. 
@@ -15924,19 +15669,14 @@ class ImageLimitSettings(msrest.serialization.Model): """ _attribute_map = { - 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, - 'max_trials': {'key': 'maxTrials', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, + "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, + "max_trials": {"key": "maxTrials", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, } def __init__( - self, - *, - max_concurrent_trials: Optional[int] = 1, - max_trials: Optional[int] = 1, - timeout: Optional[datetime.timedelta] = "P7D", - **kwargs - ): + self, *, max_concurrent_trials: int = 1, max_trials: int = 1, timeout: datetime.timedelta = "P7D", **kwargs: Any + ) -> None: """ :keyword max_concurrent_trials: Maximum number of concurrent AutoML iterations. :paramtype max_concurrent_trials: int @@ -15945,13 +15685,13 @@ def __init__( :keyword timeout: AutoML job timeout. :paramtype timeout: ~datetime.timedelta """ - super(ImageLimitSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.max_concurrent_trials = max_concurrent_trials self.max_trials = max_trials self.timeout = timeout -class ImageMetadata(msrest.serialization.Model): +class ImageMetadata(_serialization.Model): """Returns metadata about the operating system image for this compute instance. Variables are only populated by the server, and will be ignored when sending a request. @@ -15969,14 +15709,14 @@ class ImageMetadata(msrest.serialization.Model): """ _validation = { - 'os_patching_status': {'readonly': True}, + "os_patching_status": {"readonly": True}, } _attribute_map = { - 'current_image_version': {'key': 'currentImageVersion', 'type': 'str'}, - 'latest_image_version': {'key': 'latestImageVersion', 'type': 'str'}, - 'is_latest_os_image_version': {'key': 'isLatestOsImageVersion', 'type': 'bool'}, - 'os_patching_status': {'key': 'osPatchingStatus', 'type': 'OsPatchingStatus'}, + "current_image_version": {"key": "currentImageVersion", "type": "str"}, + "latest_image_version": {"key": "latestImageVersion", "type": "str"}, + "is_latest_os_image_version": {"key": "isLatestOsImageVersion", "type": "bool"}, + "os_patching_status": {"key": "osPatchingStatus", "type": "OsPatchingStatus"}, } def __init__( @@ -15985,8 +15725,8 @@ def __init__( current_image_version: Optional[str] = None, latest_image_version: Optional[str] = None, is_latest_os_image_version: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword current_image_version: Specifies the current operating system image version this compute instance is running on. @@ -15997,29 +15737,30 @@ def __init__( latest operating system image. :paramtype is_latest_os_image_version: bool """ - super(ImageMetadata, self).__init__(**kwargs) + super().__init__(**kwargs) self.current_image_version = current_image_version self.latest_image_version = latest_image_version self.is_latest_os_image_version = is_latest_os_image_version self.os_patching_status = None -class ImageModelDistributionSettings(msrest.serialization.Model): +class ImageModelDistributionSettings(_serialization.Model): # pylint: disable=too-many-instance-attributes """Distribution expressions to sweep over values of model settings. 
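# --- Illustrative sketch (editor's example, not part of the regenerated diff) ---
# The regenerated ImageLimitSettings signature drops Optional[...] and bakes in
# defaults: 1 concurrent trial, 1 trial, and a "P7D" (7 day) ISO-8601 timeout.
# Assumes the models package is importable as below.
import datetime

from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

defaults = _models.ImageLimitSettings()        # max_concurrent_trials=1, max_trials=1, timeout="P7D"
custom = _models.ImageLimitSettings(
    max_concurrent_trials=4,
    max_trials=20,
    timeout=datetime.timedelta(days=2),        # serialized with the "duration" type from the map above
)
# --- end sketch ---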
-:code:` -Some examples are: -``` -ModelName = "choice('seresnext', 'resnest50')"; -LearningRate = "uniform(0.001, 0.01)"; -LayersToFreeze = "choice(0, 2)"; -```` -All distributions can be specified as distribution_name(min, max) or choice(val1, val2, ..., valn) -where distribution name can be: uniform, quniform, loguniform, etc -For more details on how to compose distribution expressions please check the documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + :code:` + Some examples are: + ``` + ModelName = "choice('seresnext', 'resnest50')"; + LearningRate = "uniform(0.001, 0.01)"; + LayersToFreeze = "choice(0, 2)"; + ```` + All distributions can be specified as distribution_name(min, max) or choice(val1, val2, ..., + valn) + where distribution name can be: uniform, quniform, loguniform, etc + For more details on how to compose distribution expressions please check the documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. :vartype ams_gradient: str @@ -16102,37 +15843,37 @@ class ImageModelDistributionSettings(msrest.serialization.Model): """ _attribute_map = { - 'ams_gradient': {'key': 'amsGradient', 'type': 'str'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'str'}, - 'beta2': {'key': 'beta2', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'str'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'str'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'str'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'str'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'str'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'str'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'str'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'str'}, - 'nesterov': {'key': 'nesterov', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'str'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'str'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'str'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'str'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'str'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'str'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'str'}, - } - - def __init__( + "ams_gradient": {"key": "amsGradient", "type": "str"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": 
"beta1", "type": "str"}, + "beta2": {"key": "beta2", "type": "str"}, + "distributed": {"key": "distributed", "type": "str"}, + "early_stopping": {"key": "earlyStopping", "type": "str"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "str"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "str"}, + "nesterov": {"key": "nesterov", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "str"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, + } + + def __init__( # pylint: disable=too-many-locals self, *, ams_gradient: Optional[str] = None, @@ -16163,8 +15904,8 @@ def __init__( warmup_cosine_lr_cycles: Optional[str] = None, warmup_cosine_lr_warmup_epochs: Optional[str] = None, weight_decay: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. :paramtype ams_gradient: str @@ -16247,7 +15988,7 @@ def __init__( be a float in the range[0, 1]. :paramtype weight_decay: str """ - super(ImageModelDistributionSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.ams_gradient = ams_gradient self.augmentations = augmentations self.beta1 = beta1 @@ -16278,20 +16019,22 @@ def __init__( self.weight_decay = weight_decay -class ImageModelDistributionSettingsClassification(ImageModelDistributionSettings): +class ImageModelDistributionSettingsClassification( + ImageModelDistributionSettings +): # pylint: disable=too-many-instance-attributes """Distribution expressions to sweep over values of model settings. -:code:` -Some examples are: -``` -ModelName = "choice('seresnext', 'resnest50')"; -LearningRate = "uniform(0.001, 0.01)"; -LayersToFreeze = "choice(0, 2)"; -```` -For more details on how to compose distribution expressions please check the documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. 
+ :code:` + Some examples are: + ``` + ModelName = "choice('seresnext', 'resnest50')"; + LearningRate = "uniform(0.001, 0.01)"; + LayersToFreeze = "choice(0, 2)"; + ```` + For more details on how to compose distribution expressions please check the documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. :vartype ams_gradient: str @@ -16387,41 +16130,41 @@ class ImageModelDistributionSettingsClassification(ImageModelDistributionSetting """ _attribute_map = { - 'ams_gradient': {'key': 'amsGradient', 'type': 'str'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'str'}, - 'beta2': {'key': 'beta2', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'str'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'str'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'str'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'str'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'str'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'str'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'str'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'str'}, - 'nesterov': {'key': 'nesterov', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'str'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'str'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'str'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'str'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'str'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'str'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'str'}, - 'training_crop_size': {'key': 'trainingCropSize', 'type': 'str'}, - 'validation_crop_size': {'key': 'validationCropSize', 'type': 'str'}, - 'validation_resize_size': {'key': 'validationResizeSize', 'type': 'str'}, - 'weighted_loss': {'key': 'weightedLoss', 'type': 'str'}, - } - - def __init__( + "ams_gradient": {"key": "amsGradient", "type": "str"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "str"}, + "beta2": {"key": "beta2", "type": "str"}, + "distributed": {"key": "distributed", "type": "str"}, + "early_stopping": {"key": "earlyStopping", "type": "str"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": 
"str"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "str"}, + "nesterov": {"key": "nesterov", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "str"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, + "training_crop_size": {"key": "trainingCropSize", "type": "str"}, + "validation_crop_size": {"key": "validationCropSize", "type": "str"}, + "validation_resize_size": {"key": "validationResizeSize", "type": "str"}, + "weighted_loss": {"key": "weightedLoss", "type": "str"}, + } + + def __init__( # pylint: disable=too-many-locals self, *, ams_gradient: Optional[str] = None, @@ -16456,8 +16199,8 @@ def __init__( validation_crop_size: Optional[str] = None, validation_resize_size: Optional[str] = None, weighted_loss: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. :paramtype ams_gradient: str @@ -16553,27 +16296,59 @@ def __init__( 0 or 1 or 2. 
:paramtype weighted_loss: str """ - super(ImageModelDistributionSettingsClassification, self).__init__(ams_gradient=ams_gradient, augmentations=augmentations, beta1=beta1, beta2=beta2, distributed=distributed, early_stopping=early_stopping, early_stopping_delay=early_stopping_delay, early_stopping_patience=early_stopping_patience, enable_onnx_normalization=enable_onnx_normalization, evaluation_frequency=evaluation_frequency, gradient_accumulation_step=gradient_accumulation_step, layers_to_freeze=layers_to_freeze, learning_rate=learning_rate, learning_rate_scheduler=learning_rate_scheduler, model_name=model_name, momentum=momentum, nesterov=nesterov, number_of_epochs=number_of_epochs, number_of_workers=number_of_workers, optimizer=optimizer, random_seed=random_seed, step_lr_gamma=step_lr_gamma, step_lr_step_size=step_lr_step_size, training_batch_size=training_batch_size, validation_batch_size=validation_batch_size, warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, weight_decay=weight_decay, **kwargs) + super().__init__( + ams_gradient=ams_gradient, + augmentations=augmentations, + beta1=beta1, + beta2=beta2, + distributed=distributed, + early_stopping=early_stopping, + early_stopping_delay=early_stopping_delay, + early_stopping_patience=early_stopping_patience, + enable_onnx_normalization=enable_onnx_normalization, + evaluation_frequency=evaluation_frequency, + gradient_accumulation_step=gradient_accumulation_step, + layers_to_freeze=layers_to_freeze, + learning_rate=learning_rate, + learning_rate_scheduler=learning_rate_scheduler, + model_name=model_name, + momentum=momentum, + nesterov=nesterov, + number_of_epochs=number_of_epochs, + number_of_workers=number_of_workers, + optimizer=optimizer, + random_seed=random_seed, + step_lr_gamma=step_lr_gamma, + step_lr_step_size=step_lr_step_size, + training_batch_size=training_batch_size, + validation_batch_size=validation_batch_size, + warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, + warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, + weight_decay=weight_decay, + **kwargs + ) self.training_crop_size = training_crop_size self.validation_crop_size = validation_crop_size self.validation_resize_size = validation_resize_size self.weighted_loss = weighted_loss -class ImageModelDistributionSettingsObjectDetection(ImageModelDistributionSettings): +class ImageModelDistributionSettingsObjectDetection( + ImageModelDistributionSettings +): # pylint: disable=too-many-instance-attributes """Distribution expressions to sweep over values of model settings. -:code:` -Some examples are: -``` -ModelName = "choice('seresnext', 'resnest50')"; -LearningRate = "uniform(0.001, 0.01)"; -LayersToFreeze = "choice(0, 2)"; -```` -For more details on how to compose distribution expressions please check the documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. 
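# --- Illustrative sketch (editor's example, not part of the regenerated diff) ---
# The classification variant adds crop/resize and weighted-loss expressions on
# top of the base distribution fields; values stay strings because they are
# sweep expressions. Model names reuse the docstring's own examples.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

cls_space = _models.ImageModelDistributionSettingsClassification(
    model_name="choice('seresnext', 'resnest50')",
    learning_rate="uniform(0.001, 0.01)",
    training_crop_size="choice(224, 256)",
    validation_resize_size="choice(288, 320)",
    weighted_loss="choice(0, 1, 2)",           # 0, 1 or 2 per the docstring above
)
# --- end sketch ---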
+ :code:` + Some examples are: + ``` + ModelName = "choice('seresnext', 'resnest50')"; + LearningRate = "uniform(0.001, 0.01)"; + LayersToFreeze = "choice(0, 2)"; + ```` + For more details on how to compose distribution expressions please check the documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-tune-hyperparameters + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. :ivar ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. :vartype ams_gradient: str @@ -16708,50 +16483,50 @@ class ImageModelDistributionSettingsObjectDetection(ImageModelDistributionSettin """ _attribute_map = { - 'ams_gradient': {'key': 'amsGradient', 'type': 'str'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'str'}, - 'beta2': {'key': 'beta2', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'str'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'str'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'str'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'str'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'str'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'str'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'str'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'str'}, - 'nesterov': {'key': 'nesterov', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'str'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'str'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'str'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'str'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'str'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'str'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'str'}, - 'box_detections_per_image': {'key': 'boxDetectionsPerImage', 'type': 'str'}, - 'box_score_threshold': {'key': 'boxScoreThreshold', 'type': 'str'}, - 'image_size': {'key': 'imageSize', 'type': 'str'}, - 'max_size': {'key': 'maxSize', 'type': 'str'}, - 'min_size': {'key': 'minSize', 'type': 'str'}, - 'model_size': {'key': 'modelSize', 'type': 'str'}, - 'multi_scale': {'key': 'multiScale', 'type': 'str'}, - 'nms_iou_threshold': {'key': 'nmsIouThreshold', 'type': 'str'}, - 'tile_grid_size': {'key': 'tileGridSize', 'type': 'str'}, - 'tile_overlap_ratio': {'key': 'tileOverlapRatio', 'type': 'str'}, - 'tile_predictions_nms_threshold': {'key': 'tilePredictionsNmsThreshold', 'type': 'str'}, - 'validation_iou_threshold': {'key': 'validationIouThreshold', 'type': 'str'}, - 'validation_metric_type': {'key': 'validationMetricType', 'type': 'str'}, - } - - def __init__( + "ams_gradient": {"key": "amsGradient", "type": "str"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": 
{"key": "beta1", "type": "str"}, + "beta2": {"key": "beta2", "type": "str"}, + "distributed": {"key": "distributed", "type": "str"}, + "early_stopping": {"key": "earlyStopping", "type": "str"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "str"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "str"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "str"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "str"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "str"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "str"}, + "nesterov": {"key": "nesterov", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "str"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "str"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "str"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "str"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, + "box_detections_per_image": {"key": "boxDetectionsPerImage", "type": "str"}, + "box_score_threshold": {"key": "boxScoreThreshold", "type": "str"}, + "image_size": {"key": "imageSize", "type": "str"}, + "max_size": {"key": "maxSize", "type": "str"}, + "min_size": {"key": "minSize", "type": "str"}, + "model_size": {"key": "modelSize", "type": "str"}, + "multi_scale": {"key": "multiScale", "type": "str"}, + "nms_iou_threshold": {"key": "nmsIouThreshold", "type": "str"}, + "tile_grid_size": {"key": "tileGridSize", "type": "str"}, + "tile_overlap_ratio": {"key": "tileOverlapRatio", "type": "str"}, + "tile_predictions_nms_threshold": {"key": "tilePredictionsNmsThreshold", "type": "str"}, + "validation_iou_threshold": {"key": "validationIouThreshold", "type": "str"}, + "validation_metric_type": {"key": "validationMetricType", "type": "str"}, + } + + def __init__( # pylint: disable=too-many-locals self, *, ams_gradient: Optional[str] = None, @@ -16795,8 +16570,8 @@ def __init__( tile_predictions_nms_threshold: Optional[str] = None, validation_iou_threshold: Optional[str] = None, validation_metric_type: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword ams_gradient: Enable AMSGrad when optimizer is 'adam' or 'adamw'. :paramtype ams_gradient: str @@ -16931,7 +16706,37 @@ def __init__( be 'none', 'coco', 'voc', or 'coco_voc'. 
:paramtype validation_metric_type: str """ - super(ImageModelDistributionSettingsObjectDetection, self).__init__(ams_gradient=ams_gradient, augmentations=augmentations, beta1=beta1, beta2=beta2, distributed=distributed, early_stopping=early_stopping, early_stopping_delay=early_stopping_delay, early_stopping_patience=early_stopping_patience, enable_onnx_normalization=enable_onnx_normalization, evaluation_frequency=evaluation_frequency, gradient_accumulation_step=gradient_accumulation_step, layers_to_freeze=layers_to_freeze, learning_rate=learning_rate, learning_rate_scheduler=learning_rate_scheduler, model_name=model_name, momentum=momentum, nesterov=nesterov, number_of_epochs=number_of_epochs, number_of_workers=number_of_workers, optimizer=optimizer, random_seed=random_seed, step_lr_gamma=step_lr_gamma, step_lr_step_size=step_lr_step_size, training_batch_size=training_batch_size, validation_batch_size=validation_batch_size, warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, weight_decay=weight_decay, **kwargs) + super().__init__( + ams_gradient=ams_gradient, + augmentations=augmentations, + beta1=beta1, + beta2=beta2, + distributed=distributed, + early_stopping=early_stopping, + early_stopping_delay=early_stopping_delay, + early_stopping_patience=early_stopping_patience, + enable_onnx_normalization=enable_onnx_normalization, + evaluation_frequency=evaluation_frequency, + gradient_accumulation_step=gradient_accumulation_step, + layers_to_freeze=layers_to_freeze, + learning_rate=learning_rate, + learning_rate_scheduler=learning_rate_scheduler, + model_name=model_name, + momentum=momentum, + nesterov=nesterov, + number_of_epochs=number_of_epochs, + number_of_workers=number_of_workers, + optimizer=optimizer, + random_seed=random_seed, + step_lr_gamma=step_lr_gamma, + step_lr_step_size=step_lr_step_size, + training_batch_size=training_batch_size, + validation_batch_size=validation_batch_size, + warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, + warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, + weight_decay=weight_decay, + **kwargs + ) self.box_detections_per_image = box_detections_per_image self.box_score_threshold = box_score_threshold self.image_size = image_size @@ -16947,10 +16752,10 @@ def __init__( self.validation_metric_type = validation_metric_type -class ImageModelSettings(msrest.serialization.Model): +class ImageModelSettings(_serialization.Model): # pylint: disable=too-many-instance-attributes """Settings used for training the model. -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. :ivar advanced_settings: Settings for advanced scenarios. :vartype advanced_settings: str @@ -17002,7 +16807,7 @@ class ImageModelSettings(msrest.serialization.Model): :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. :vartype learning_rate: float :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". + 'step'. Known values are: "None", "WarmupCosine", and "Step". 
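# --- Illustrative sketch (editor's example, not part of the regenerated diff) ---
# The object-detection variant layers detector-specific sweep expressions (box
# thresholds, model_size, tiling, ...) on the same base fields. The model names
# below are assumptions for illustration; 'yolov5' is the algorithm the
# docstrings reference.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

od_space = _models.ImageModelDistributionSettingsObjectDetection(
    model_name="choice('yolov5', 'fasterrcnn_resnet50_fpn')",
    learning_rate="uniform(0.0001, 0.001)",
    model_size="choice('small', 'medium')",    # must be 'small', 'medium', 'large', or 'xlarge'
    box_score_threshold="uniform(0.3, 0.7)",
)
# --- end sketch ---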
:vartype learning_rate_scheduler: str or ~azure.mgmt.machinelearningservices.models.LearningRateScheduler :ivar model_name: Name of the model to use for training. @@ -17017,7 +16822,7 @@ class ImageModelSettings(msrest.serialization.Model): :vartype number_of_epochs: int :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. :vartype number_of_workers: int - :ivar optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". + :ivar optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer :ivar random_seed: Random seed to be used when using deterministic training. :vartype random_seed: int @@ -17043,41 +16848,41 @@ class ImageModelSettings(msrest.serialization.Model): """ _attribute_map = { - 'advanced_settings': {'key': 'advancedSettings', 'type': 'str'}, - 'ams_gradient': {'key': 'amsGradient', 'type': 'bool'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'float'}, - 'beta2': {'key': 'beta2', 'type': 'float'}, - 'checkpoint_frequency': {'key': 'checkpointFrequency', 'type': 'int'}, - 'checkpoint_model': {'key': 'checkpointModel', 'type': 'MLFlowModelJobInput'}, - 'checkpoint_run_id': {'key': 'checkpointRunId', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'bool'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'bool'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'int'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'int'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'bool'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'int'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'int'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'int'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'float'}, - 'nesterov': {'key': 'nesterov', 'type': 'bool'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'int'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'float'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'int'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'int'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'float'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'int'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, - } - - def __init__( + "advanced_settings": {"key": "advancedSettings", "type": "str"}, + "ams_gradient": {"key": "amsGradient", "type": "bool"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "float"}, + "beta2": {"key": "beta2", "type": "float"}, + "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, + "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, + "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, + "distributed": {"key": "distributed", "type": "bool"}, + 
"early_stopping": {"key": "earlyStopping", "type": "bool"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "float"}, + "nesterov": {"key": "nesterov", "type": "bool"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "int"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "float"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, + } + + def __init__( # pylint: disable=too-many-locals self, *, advanced_settings: Optional[str] = None, @@ -17086,7 +16891,7 @@ def __init__( beta1: Optional[float] = None, beta2: Optional[float] = None, checkpoint_frequency: Optional[int] = None, - checkpoint_model: Optional["MLFlowModelJobInput"] = None, + checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, checkpoint_run_id: Optional[str] = None, distributed: Optional[bool] = None, early_stopping: Optional[bool] = None, @@ -17097,13 +16902,13 @@ def __init__( gradient_accumulation_step: Optional[int] = None, layers_to_freeze: Optional[int] = None, learning_rate: Optional[float] = None, - learning_rate_scheduler: Optional[Union[str, "LearningRateScheduler"]] = None, + learning_rate_scheduler: Optional[Union[str, "_models.LearningRateScheduler"]] = None, model_name: Optional[str] = None, momentum: Optional[float] = None, nesterov: Optional[bool] = None, number_of_epochs: Optional[int] = None, number_of_workers: Optional[int] = None, - optimizer: Optional[Union[str, "StochasticOptimizer"]] = None, + optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, random_seed: Optional[int] = None, step_lr_gamma: Optional[float] = None, step_lr_step_size: Optional[int] = None, @@ -17112,8 +16917,8 @@ def __init__( warmup_cosine_lr_cycles: Optional[float] = None, warmup_cosine_lr_warmup_epochs: Optional[int] = None, weight_decay: Optional[float] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword advanced_settings: Settings for advanced scenarios. :paramtype advanced_settings: str @@ -17167,7 +16972,7 @@ def __init__( :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. :paramtype learning_rate: float :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". + 'step'. Known values are: "None", "WarmupCosine", and "Step". 
:paramtype learning_rate_scheduler: str or ~azure.mgmt.machinelearningservices.models.LearningRateScheduler :keyword model_name: Name of the model to use for training. @@ -17183,7 +16988,7 @@ def __init__( :paramtype number_of_epochs: int :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. :paramtype number_of_workers: int - :keyword optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". + :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer :keyword random_seed: Random seed to be used when using deterministic training. :paramtype random_seed: int @@ -17207,7 +17012,7 @@ def __init__( be a float in the range[0, 1]. :paramtype weight_decay: float """ - super(ImageModelSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.advanced_settings = advanced_settings self.ams_gradient = ams_gradient self.augmentations = augmentations @@ -17242,10 +17047,10 @@ def __init__( self.weight_decay = weight_decay -class ImageModelSettingsClassification(ImageModelSettings): +class ImageModelSettingsClassification(ImageModelSettings): # pylint: disable=too-many-instance-attributes """Settings used for training the model. -For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. :ivar advanced_settings: Settings for advanced scenarios. :vartype advanced_settings: str @@ -17297,7 +17102,7 @@ class ImageModelSettingsClassification(ImageModelSettings): :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. :vartype learning_rate: float :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". + 'step'. Known values are: "None", "WarmupCosine", and "Step". :vartype learning_rate_scheduler: str or ~azure.mgmt.machinelearningservices.models.LearningRateScheduler :ivar model_name: Name of the model to use for training. @@ -17312,7 +17117,7 @@ class ImageModelSettingsClassification(ImageModelSettings): :vartype number_of_epochs: int :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. :vartype number_of_workers: int - :ivar optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". + :ivar optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer :ivar random_seed: Random seed to be used when using deterministic training. 
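# --- Illustrative sketch (editor's example, not part of the regenerated diff) ---
# Unlike the distribution classes, ImageModelSettings carries concrete
# (non-sweep) values, so numeric fields are real ints/floats and enum fields
# accept the "Known values" strings called out in the regenerated docstrings.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

fixed_settings = _models.ImageModelSettings(
    learning_rate=0.005,
    learning_rate_scheduler="WarmupCosine",    # Known values: "None", "WarmupCosine", "Step"
    optimizer="Adamw",                         # Known values: "None", "Sgd", "Adam", "Adamw"
    number_of_epochs=15,
    early_stopping=True,
)
# --- end sketch ---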
:vartype random_seed: int @@ -17351,45 +17156,45 @@ class ImageModelSettingsClassification(ImageModelSettings): """ _attribute_map = { - 'advanced_settings': {'key': 'advancedSettings', 'type': 'str'}, - 'ams_gradient': {'key': 'amsGradient', 'type': 'bool'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'float'}, - 'beta2': {'key': 'beta2', 'type': 'float'}, - 'checkpoint_frequency': {'key': 'checkpointFrequency', 'type': 'int'}, - 'checkpoint_model': {'key': 'checkpointModel', 'type': 'MLFlowModelJobInput'}, - 'checkpoint_run_id': {'key': 'checkpointRunId', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'bool'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'bool'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'int'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'int'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'bool'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'int'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'int'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'int'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'float'}, - 'nesterov': {'key': 'nesterov', 'type': 'bool'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'int'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'float'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'int'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'int'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'float'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'int'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, - 'training_crop_size': {'key': 'trainingCropSize', 'type': 'int'}, - 'validation_crop_size': {'key': 'validationCropSize', 'type': 'int'}, - 'validation_resize_size': {'key': 'validationResizeSize', 'type': 'int'}, - 'weighted_loss': {'key': 'weightedLoss', 'type': 'int'}, - } - - def __init__( + "advanced_settings": {"key": "advancedSettings", "type": "str"}, + "ams_gradient": {"key": "amsGradient", "type": "bool"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "float"}, + "beta2": {"key": "beta2", "type": "float"}, + "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, + "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, + "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, + "distributed": {"key": "distributed", "type": "bool"}, + "early_stopping": {"key": "earlyStopping", "type": "bool"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, + 
"layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "float"}, + "nesterov": {"key": "nesterov", "type": "bool"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "int"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "float"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, + "training_crop_size": {"key": "trainingCropSize", "type": "int"}, + "validation_crop_size": {"key": "validationCropSize", "type": "int"}, + "validation_resize_size": {"key": "validationResizeSize", "type": "int"}, + "weighted_loss": {"key": "weightedLoss", "type": "int"}, + } + + def __init__( # pylint: disable=too-many-locals self, *, advanced_settings: Optional[str] = None, @@ -17398,7 +17203,7 @@ def __init__( beta1: Optional[float] = None, beta2: Optional[float] = None, checkpoint_frequency: Optional[int] = None, - checkpoint_model: Optional["MLFlowModelJobInput"] = None, + checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, checkpoint_run_id: Optional[str] = None, distributed: Optional[bool] = None, early_stopping: Optional[bool] = None, @@ -17409,13 +17214,13 @@ def __init__( gradient_accumulation_step: Optional[int] = None, layers_to_freeze: Optional[int] = None, learning_rate: Optional[float] = None, - learning_rate_scheduler: Optional[Union[str, "LearningRateScheduler"]] = None, + learning_rate_scheduler: Optional[Union[str, "_models.LearningRateScheduler"]] = None, model_name: Optional[str] = None, momentum: Optional[float] = None, nesterov: Optional[bool] = None, number_of_epochs: Optional[int] = None, number_of_workers: Optional[int] = None, - optimizer: Optional[Union[str, "StochasticOptimizer"]] = None, + optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, random_seed: Optional[int] = None, step_lr_gamma: Optional[float] = None, step_lr_step_size: Optional[int] = None, @@ -17428,8 +17233,8 @@ def __init__( validation_crop_size: Optional[int] = None, validation_resize_size: Optional[int] = None, weighted_loss: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword advanced_settings: Settings for advanced scenarios. :paramtype advanced_settings: str @@ -17483,7 +17288,7 @@ def __init__( :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. :paramtype learning_rate: float :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". + 'step'. Known values are: "None", "WarmupCosine", and "Step". :paramtype learning_rate_scheduler: str or ~azure.mgmt.machinelearningservices.models.LearningRateScheduler :keyword model_name: Name of the model to use for training. 
@@ -17499,7 +17304,7 @@ def __init__( :paramtype number_of_epochs: int :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. :paramtype number_of_workers: int - :keyword optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". + :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer :keyword random_seed: Random seed to be used when using deterministic training. :paramtype random_seed: int @@ -17536,17 +17341,51 @@ def __init__( 0 or 1 or 2. :paramtype weighted_loss: int """ - super(ImageModelSettingsClassification, self).__init__(advanced_settings=advanced_settings, ams_gradient=ams_gradient, augmentations=augmentations, beta1=beta1, beta2=beta2, checkpoint_frequency=checkpoint_frequency, checkpoint_model=checkpoint_model, checkpoint_run_id=checkpoint_run_id, distributed=distributed, early_stopping=early_stopping, early_stopping_delay=early_stopping_delay, early_stopping_patience=early_stopping_patience, enable_onnx_normalization=enable_onnx_normalization, evaluation_frequency=evaluation_frequency, gradient_accumulation_step=gradient_accumulation_step, layers_to_freeze=layers_to_freeze, learning_rate=learning_rate, learning_rate_scheduler=learning_rate_scheduler, model_name=model_name, momentum=momentum, nesterov=nesterov, number_of_epochs=number_of_epochs, number_of_workers=number_of_workers, optimizer=optimizer, random_seed=random_seed, step_lr_gamma=step_lr_gamma, step_lr_step_size=step_lr_step_size, training_batch_size=training_batch_size, validation_batch_size=validation_batch_size, warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, weight_decay=weight_decay, **kwargs) + super().__init__( + advanced_settings=advanced_settings, + ams_gradient=ams_gradient, + augmentations=augmentations, + beta1=beta1, + beta2=beta2, + checkpoint_frequency=checkpoint_frequency, + checkpoint_model=checkpoint_model, + checkpoint_run_id=checkpoint_run_id, + distributed=distributed, + early_stopping=early_stopping, + early_stopping_delay=early_stopping_delay, + early_stopping_patience=early_stopping_patience, + enable_onnx_normalization=enable_onnx_normalization, + evaluation_frequency=evaluation_frequency, + gradient_accumulation_step=gradient_accumulation_step, + layers_to_freeze=layers_to_freeze, + learning_rate=learning_rate, + learning_rate_scheduler=learning_rate_scheduler, + model_name=model_name, + momentum=momentum, + nesterov=nesterov, + number_of_epochs=number_of_epochs, + number_of_workers=number_of_workers, + optimizer=optimizer, + random_seed=random_seed, + step_lr_gamma=step_lr_gamma, + step_lr_step_size=step_lr_step_size, + training_batch_size=training_batch_size, + validation_batch_size=validation_batch_size, + warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, + warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, + weight_decay=weight_decay, + **kwargs + ) self.training_crop_size = training_crop_size self.validation_crop_size = validation_crop_size self.validation_resize_size = validation_resize_size self.weighted_loss = weighted_loss -class ImageModelSettingsObjectDetection(ImageModelSettings): +class ImageModelSettingsObjectDetection(ImageModelSettings): # pylint: disable=too-many-instance-attributes """Settings used for training the model. 
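# --- Illustrative sketch (editor's example, not part of the regenerated diff) ---
# ImageModelSettingsClassification takes concrete integers for crop/resize
# sizes and weighted_loss (contrast with the string expressions in the
# distribution variant). Assumes the import below resolves.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

cls_settings = _models.ImageModelSettingsClassification(
    model_name="seresnext",
    training_crop_size=224,
    validation_crop_size=224,
    validation_resize_size=256,
    weighted_loss=1,                           # 0, 1 or 2 per the docstring above
)
# --- end sketch ---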
-For more information on the available settings please visit the official documentation: -https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. + For more information on the available settings please visit the official documentation: + https://docs.microsoft.com/en-us/azure/machine-learning/how-to-auto-train-image-models. :ivar advanced_settings: Settings for advanced scenarios. :vartype advanced_settings: str @@ -17598,7 +17437,7 @@ class ImageModelSettingsObjectDetection(ImageModelSettings): :ivar learning_rate: Initial learning rate. Must be a float in the range [0, 1]. :vartype learning_rate: float :ivar learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". + 'step'. Known values are: "None", "WarmupCosine", and "Step". :vartype learning_rate_scheduler: str or ~azure.mgmt.machinelearningservices.models.LearningRateScheduler :ivar model_name: Name of the model to use for training. @@ -17613,7 +17452,7 @@ class ImageModelSettingsObjectDetection(ImageModelSettings): :vartype number_of_epochs: int :ivar number_of_workers: Number of data loader workers. Must be a non-negative integer. :vartype number_of_workers: int - :ivar optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". + :ivar optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". :vartype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer :ivar random_seed: Random seed to be used when using deterministic training. :vartype random_seed: int @@ -17648,12 +17487,12 @@ class ImageModelSettingsObjectDetection(ImageModelSettings): Note: The training run may get into CUDA OOM if the size is too big. Note: This settings is only supported for the 'yolov5' algorithm. :vartype image_size: int - :ivar log_training_metrics: Enable computing and logging training metrics. Possible values - include: "Enable", "Disable". + :ivar log_training_metrics: Enable computing and logging training metrics. Known values are: + "Enable" and "Disable". :vartype log_training_metrics: str or ~azure.mgmt.machinelearningservices.models.LogTrainingMetrics - :ivar log_validation_loss: Enable computing and logging validation loss. Possible values - include: "Enable", "Disable". + :ivar log_validation_loss: Enable computing and logging validation loss. Known values are: + "Enable" and "Disable". :vartype log_validation_loss: str or ~azure.mgmt.machinelearningservices.models.LogValidationLoss :ivar max_size: Maximum size of the image to be rescaled before feeding it to the backbone. @@ -17666,8 +17505,8 @@ class ImageModelSettingsObjectDetection(ImageModelSettings): :vartype min_size: int :ivar model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. Note: training run may get into CUDA OOM if the model size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. Possible values include: - "None", "Small", "Medium", "Large", "ExtraLarge". + Note: This settings is only supported for the 'yolov5' algorithm. Known values are: "None", + "Small", "Medium", "Large", and "ExtraLarge". :vartype model_size: str or ~azure.mgmt.machinelearningservices.models.ModelSize :ivar multi_scale: Enable multi-scale image by varying image size by +/- 50%. Note: training run may get into CUDA OOM if no sufficient GPU memory. 
@@ -17693,63 +17532,63 @@ class ImageModelSettingsObjectDetection(ImageModelSettings): :ivar validation_iou_threshold: IOU threshold to use when computing validation metric. Must be float in the range [0, 1]. :vartype validation_iou_threshold: float - :ivar validation_metric_type: Metric computation method to use for validation metrics. Possible - values include: "None", "Coco", "Voc", "CocoVoc". + :ivar validation_metric_type: Metric computation method to use for validation metrics. Known + values are: "None", "Coco", "Voc", and "CocoVoc". :vartype validation_metric_type: str or ~azure.mgmt.machinelearningservices.models.ValidationMetricType """ _attribute_map = { - 'advanced_settings': {'key': 'advancedSettings', 'type': 'str'}, - 'ams_gradient': {'key': 'amsGradient', 'type': 'bool'}, - 'augmentations': {'key': 'augmentations', 'type': 'str'}, - 'beta1': {'key': 'beta1', 'type': 'float'}, - 'beta2': {'key': 'beta2', 'type': 'float'}, - 'checkpoint_frequency': {'key': 'checkpointFrequency', 'type': 'int'}, - 'checkpoint_model': {'key': 'checkpointModel', 'type': 'MLFlowModelJobInput'}, - 'checkpoint_run_id': {'key': 'checkpointRunId', 'type': 'str'}, - 'distributed': {'key': 'distributed', 'type': 'bool'}, - 'early_stopping': {'key': 'earlyStopping', 'type': 'bool'}, - 'early_stopping_delay': {'key': 'earlyStoppingDelay', 'type': 'int'}, - 'early_stopping_patience': {'key': 'earlyStoppingPatience', 'type': 'int'}, - 'enable_onnx_normalization': {'key': 'enableOnnxNormalization', 'type': 'bool'}, - 'evaluation_frequency': {'key': 'evaluationFrequency', 'type': 'int'}, - 'gradient_accumulation_step': {'key': 'gradientAccumulationStep', 'type': 'int'}, - 'layers_to_freeze': {'key': 'layersToFreeze', 'type': 'int'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'momentum': {'key': 'momentum', 'type': 'float'}, - 'nesterov': {'key': 'nesterov', 'type': 'bool'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, - 'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'}, - 'optimizer': {'key': 'optimizer', 'type': 'str'}, - 'random_seed': {'key': 'randomSeed', 'type': 'int'}, - 'step_lr_gamma': {'key': 'stepLRGamma', 'type': 'float'}, - 'step_lr_step_size': {'key': 'stepLRStepSize', 'type': 'int'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'int'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, - 'warmup_cosine_lr_cycles': {'key': 'warmupCosineLRCycles', 'type': 'float'}, - 'warmup_cosine_lr_warmup_epochs': {'key': 'warmupCosineLRWarmupEpochs', 'type': 'int'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, - 'box_detections_per_image': {'key': 'boxDetectionsPerImage', 'type': 'int'}, - 'box_score_threshold': {'key': 'boxScoreThreshold', 'type': 'float'}, - 'image_size': {'key': 'imageSize', 'type': 'int'}, - 'log_training_metrics': {'key': 'logTrainingMetrics', 'type': 'str'}, - 'log_validation_loss': {'key': 'logValidationLoss', 'type': 'str'}, - 'max_size': {'key': 'maxSize', 'type': 'int'}, - 'min_size': {'key': 'minSize', 'type': 'int'}, - 'model_size': {'key': 'modelSize', 'type': 'str'}, - 'multi_scale': {'key': 'multiScale', 'type': 'bool'}, - 'nms_iou_threshold': {'key': 'nmsIouThreshold', 'type': 'float'}, - 'tile_grid_size': {'key': 'tileGridSize', 'type': 'str'}, - 'tile_overlap_ratio': {'key': 'tileOverlapRatio', 'type': 'float'}, - 
'tile_predictions_nms_threshold': {'key': 'tilePredictionsNmsThreshold', 'type': 'float'}, - 'validation_iou_threshold': {'key': 'validationIouThreshold', 'type': 'float'}, - 'validation_metric_type': {'key': 'validationMetricType', 'type': 'str'}, - } - - def __init__( + "advanced_settings": {"key": "advancedSettings", "type": "str"}, + "ams_gradient": {"key": "amsGradient", "type": "bool"}, + "augmentations": {"key": "augmentations", "type": "str"}, + "beta1": {"key": "beta1", "type": "float"}, + "beta2": {"key": "beta2", "type": "float"}, + "checkpoint_frequency": {"key": "checkpointFrequency", "type": "int"}, + "checkpoint_model": {"key": "checkpointModel", "type": "MLFlowModelJobInput"}, + "checkpoint_run_id": {"key": "checkpointRunId", "type": "str"}, + "distributed": {"key": "distributed", "type": "bool"}, + "early_stopping": {"key": "earlyStopping", "type": "bool"}, + "early_stopping_delay": {"key": "earlyStoppingDelay", "type": "int"}, + "early_stopping_patience": {"key": "earlyStoppingPatience", "type": "int"}, + "enable_onnx_normalization": {"key": "enableOnnxNormalization", "type": "bool"}, + "evaluation_frequency": {"key": "evaluationFrequency", "type": "int"}, + "gradient_accumulation_step": {"key": "gradientAccumulationStep", "type": "int"}, + "layers_to_freeze": {"key": "layersToFreeze", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "momentum": {"key": "momentum", "type": "float"}, + "nesterov": {"key": "nesterov", "type": "bool"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "number_of_workers": {"key": "numberOfWorkers", "type": "int"}, + "optimizer": {"key": "optimizer", "type": "str"}, + "random_seed": {"key": "randomSeed", "type": "int"}, + "step_lr_gamma": {"key": "stepLRGamma", "type": "float"}, + "step_lr_step_size": {"key": "stepLRStepSize", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_cosine_lr_cycles": {"key": "warmupCosineLRCycles", "type": "float"}, + "warmup_cosine_lr_warmup_epochs": {"key": "warmupCosineLRWarmupEpochs", "type": "int"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, + "box_detections_per_image": {"key": "boxDetectionsPerImage", "type": "int"}, + "box_score_threshold": {"key": "boxScoreThreshold", "type": "float"}, + "image_size": {"key": "imageSize", "type": "int"}, + "log_training_metrics": {"key": "logTrainingMetrics", "type": "str"}, + "log_validation_loss": {"key": "logValidationLoss", "type": "str"}, + "max_size": {"key": "maxSize", "type": "int"}, + "min_size": {"key": "minSize", "type": "int"}, + "model_size": {"key": "modelSize", "type": "str"}, + "multi_scale": {"key": "multiScale", "type": "bool"}, + "nms_iou_threshold": {"key": "nmsIouThreshold", "type": "float"}, + "tile_grid_size": {"key": "tileGridSize", "type": "str"}, + "tile_overlap_ratio": {"key": "tileOverlapRatio", "type": "float"}, + "tile_predictions_nms_threshold": {"key": "tilePredictionsNmsThreshold", "type": "float"}, + "validation_iou_threshold": {"key": "validationIouThreshold", "type": "float"}, + "validation_metric_type": {"key": "validationMetricType", "type": "str"}, + } + + def __init__( # pylint: disable=too-many-locals self, *, advanced_settings: Optional[str] = None, @@ -17758,7 +17597,7 @@ def __init__( beta1: Optional[float] = None, beta2: 
Optional[float] = None, checkpoint_frequency: Optional[int] = None, - checkpoint_model: Optional["MLFlowModelJobInput"] = None, + checkpoint_model: Optional["_models.MLFlowModelJobInput"] = None, checkpoint_run_id: Optional[str] = None, distributed: Optional[bool] = None, early_stopping: Optional[bool] = None, @@ -17769,13 +17608,13 @@ def __init__( gradient_accumulation_step: Optional[int] = None, layers_to_freeze: Optional[int] = None, learning_rate: Optional[float] = None, - learning_rate_scheduler: Optional[Union[str, "LearningRateScheduler"]] = None, + learning_rate_scheduler: Optional[Union[str, "_models.LearningRateScheduler"]] = None, model_name: Optional[str] = None, momentum: Optional[float] = None, nesterov: Optional[bool] = None, number_of_epochs: Optional[int] = None, number_of_workers: Optional[int] = None, - optimizer: Optional[Union[str, "StochasticOptimizer"]] = None, + optimizer: Optional[Union[str, "_models.StochasticOptimizer"]] = None, random_seed: Optional[int] = None, step_lr_gamma: Optional[float] = None, step_lr_step_size: Optional[int] = None, @@ -17787,20 +17626,20 @@ def __init__( box_detections_per_image: Optional[int] = None, box_score_threshold: Optional[float] = None, image_size: Optional[int] = None, - log_training_metrics: Optional[Union[str, "LogTrainingMetrics"]] = None, - log_validation_loss: Optional[Union[str, "LogValidationLoss"]] = None, + log_training_metrics: Optional[Union[str, "_models.LogTrainingMetrics"]] = None, + log_validation_loss: Optional[Union[str, "_models.LogValidationLoss"]] = None, max_size: Optional[int] = None, min_size: Optional[int] = None, - model_size: Optional[Union[str, "ModelSize"]] = None, + model_size: Optional[Union[str, "_models.ModelSize"]] = None, multi_scale: Optional[bool] = None, nms_iou_threshold: Optional[float] = None, tile_grid_size: Optional[str] = None, tile_overlap_ratio: Optional[float] = None, tile_predictions_nms_threshold: Optional[float] = None, validation_iou_threshold: Optional[float] = None, - validation_metric_type: Optional[Union[str, "ValidationMetricType"]] = None, - **kwargs - ): + validation_metric_type: Optional[Union[str, "_models.ValidationMetricType"]] = None, + **kwargs: Any + ) -> None: """ :keyword advanced_settings: Settings for advanced scenarios. :paramtype advanced_settings: str @@ -17854,7 +17693,7 @@ def __init__( :keyword learning_rate: Initial learning rate. Must be a float in the range [0, 1]. :paramtype learning_rate: float :keyword learning_rate_scheduler: Type of learning rate scheduler. Must be 'warmup_cosine' or - 'step'. Possible values include: "None", "WarmupCosine", "Step". + 'step'. Known values are: "None", "WarmupCosine", and "Step". :paramtype learning_rate_scheduler: str or ~azure.mgmt.machinelearningservices.models.LearningRateScheduler :keyword model_name: Name of the model to use for training. @@ -17870,7 +17709,7 @@ def __init__( :paramtype number_of_epochs: int :keyword number_of_workers: Number of data loader workers. Must be a non-negative integer. :paramtype number_of_workers: int - :keyword optimizer: Type of optimizer. Possible values include: "None", "Sgd", "Adam", "Adamw". + :keyword optimizer: Type of optimizer. Known values are: "None", "Sgd", "Adam", and "Adamw". :paramtype optimizer: str or ~azure.mgmt.machinelearningservices.models.StochasticOptimizer :keyword random_seed: Random seed to be used when using deterministic training. 
:paramtype random_seed: int @@ -17905,12 +17744,12 @@ def __init__( Note: The training run may get into CUDA OOM if the size is too big. Note: This settings is only supported for the 'yolov5' algorithm. :paramtype image_size: int - :keyword log_training_metrics: Enable computing and logging training metrics. Possible values - include: "Enable", "Disable". + :keyword log_training_metrics: Enable computing and logging training metrics. Known values are: + "Enable" and "Disable". :paramtype log_training_metrics: str or ~azure.mgmt.machinelearningservices.models.LogTrainingMetrics - :keyword log_validation_loss: Enable computing and logging validation loss. Possible values - include: "Enable", "Disable". + :keyword log_validation_loss: Enable computing and logging validation loss. Known values are: + "Enable" and "Disable". :paramtype log_validation_loss: str or ~azure.mgmt.machinelearningservices.models.LogValidationLoss :keyword max_size: Maximum size of the image to be rescaled before feeding it to the backbone. @@ -17923,8 +17762,8 @@ def __init__( :paramtype min_size: int :keyword model_size: Model size. Must be 'small', 'medium', 'large', or 'xlarge'. Note: training run may get into CUDA OOM if the model size is too big. - Note: This settings is only supported for the 'yolov5' algorithm. Possible values include: - "None", "Small", "Medium", "Large", "ExtraLarge". + Note: This settings is only supported for the 'yolov5' algorithm. Known values are: "None", + "Small", "Medium", "Large", and "ExtraLarge". :paramtype model_size: str or ~azure.mgmt.machinelearningservices.models.ModelSize :keyword multi_scale: Enable multi-scale image by varying image size by +/- 50%. Note: training run may get into CUDA OOM if no sufficient GPU memory. @@ -17950,12 +17789,46 @@ def __init__( :keyword validation_iou_threshold: IOU threshold to use when computing validation metric. Must be float in the range [0, 1]. :paramtype validation_iou_threshold: float - :keyword validation_metric_type: Metric computation method to use for validation metrics. - Possible values include: "None", "Coco", "Voc", "CocoVoc". + :keyword validation_metric_type: Metric computation method to use for validation metrics. Known + values are: "None", "Coco", "Voc", and "CocoVoc". 
:paramtype validation_metric_type: str or ~azure.mgmt.machinelearningservices.models.ValidationMetricType """ - super(ImageModelSettingsObjectDetection, self).__init__(advanced_settings=advanced_settings, ams_gradient=ams_gradient, augmentations=augmentations, beta1=beta1, beta2=beta2, checkpoint_frequency=checkpoint_frequency, checkpoint_model=checkpoint_model, checkpoint_run_id=checkpoint_run_id, distributed=distributed, early_stopping=early_stopping, early_stopping_delay=early_stopping_delay, early_stopping_patience=early_stopping_patience, enable_onnx_normalization=enable_onnx_normalization, evaluation_frequency=evaluation_frequency, gradient_accumulation_step=gradient_accumulation_step, layers_to_freeze=layers_to_freeze, learning_rate=learning_rate, learning_rate_scheduler=learning_rate_scheduler, model_name=model_name, momentum=momentum, nesterov=nesterov, number_of_epochs=number_of_epochs, number_of_workers=number_of_workers, optimizer=optimizer, random_seed=random_seed, step_lr_gamma=step_lr_gamma, step_lr_step_size=step_lr_step_size, training_batch_size=training_batch_size, validation_batch_size=validation_batch_size, warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, weight_decay=weight_decay, **kwargs) + super().__init__( + advanced_settings=advanced_settings, + ams_gradient=ams_gradient, + augmentations=augmentations, + beta1=beta1, + beta2=beta2, + checkpoint_frequency=checkpoint_frequency, + checkpoint_model=checkpoint_model, + checkpoint_run_id=checkpoint_run_id, + distributed=distributed, + early_stopping=early_stopping, + early_stopping_delay=early_stopping_delay, + early_stopping_patience=early_stopping_patience, + enable_onnx_normalization=enable_onnx_normalization, + evaluation_frequency=evaluation_frequency, + gradient_accumulation_step=gradient_accumulation_step, + layers_to_freeze=layers_to_freeze, + learning_rate=learning_rate, + learning_rate_scheduler=learning_rate_scheduler, + model_name=model_name, + momentum=momentum, + nesterov=nesterov, + number_of_epochs=number_of_epochs, + number_of_workers=number_of_workers, + optimizer=optimizer, + random_seed=random_seed, + step_lr_gamma=step_lr_gamma, + step_lr_step_size=step_lr_step_size, + training_batch_size=training_batch_size, + validation_batch_size=validation_batch_size, + warmup_cosine_lr_cycles=warmup_cosine_lr_cycles, + warmup_cosine_lr_warmup_epochs=warmup_cosine_lr_warmup_epochs, + weight_decay=weight_decay, + **kwargs + ) self.box_detections_per_image = box_detections_per_image self.box_score_threshold = box_score_threshold self.image_size = image_size @@ -17973,13 +17846,27 @@ def __init__( self.validation_metric_type = validation_metric_type -class ImageObjectDetection(AutoMLVertical, ImageObjectDetectionBase): - """Image Object Detection. Object detection is used to identify objects in an image and locate each object with a -bounding box e.g. locate all dogs and cats in an image and draw a bounding box around each. +class ImageObjectDetection(ImageObjectDetectionBase, AutoMLVertical): # pylint: disable=too-many-instance-attributes + """Image Object Detection. Object detection is used to identify objects in an image and locate + each object with a + bounding box e.g. locate all dogs and cats in an image and draw a bounding box around each. All required parameters must be populated in order to send to Azure. - :ivar limit_settings: Required. [Required] Limit settings for the AutoML job. + :ivar log_verbosity: Log verbosity for the job. 
Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :ivar limit_settings: [Required] Limit settings for the AutoML job. Required. :vartype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings :ivar sweep_settings: Model sweeping and hyperparameter sweeping related settings. :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings @@ -17997,62 +17884,56 @@ class ImageObjectDetection(AutoMLVertical, ImageObjectDetectionBase): hyperparameters. :vartype search_space: list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric to optimize for this task. Possible values include: - "MeanAveragePrecision". + :ivar primary_metric: Primary metric to optimize for this task. 
"MeanAveragePrecision" :vartype primary_metric: str or ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics """ _validation = { - 'limit_settings': {'required': True}, - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, + "limit_settings": {"required": True}, } _attribute_map = { - 'limit_settings': {'key': 'limitSettings', 'type': 'ImageLimitSettings'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'ImageSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'model_settings': {'key': 'modelSettings', 'type': 'ImageModelSettingsObjectDetection'}, - 'search_space': {'key': 'searchSpace', 'type': '[ImageModelDistributionSettingsObjectDetection]'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "limit_settings": {"key": "limitSettings", "type": "ImageLimitSettings"}, + "sweep_settings": {"key": "sweepSettings", "type": "ImageSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "model_settings": {"key": "modelSettings", "type": "ImageModelSettingsObjectDetection"}, + "search_space": {"key": "searchSpace", "type": "[ImageModelDistributionSettingsObjectDetection]"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } def __init__( self, *, - limit_settings: "ImageLimitSettings", - training_data: "MLTableJobInput", - sweep_settings: Optional["ImageSweepSettings"] = None, - validation_data: Optional["MLTableJobInput"] = None, - validation_data_size: Optional[float] = None, - model_settings: Optional["ImageModelSettingsObjectDetection"] = None, - search_space: Optional[List["ImageModelDistributionSettingsObjectDetection"]] = None, - log_verbosity: Optional[Union[str, "LogVerbosity"]] = None, + training_data: "_models.MLTableJobInput", + limit_settings: "_models.ImageLimitSettings", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, target_column_name: Optional[str] = None, - primary_metric: Optional[Union[str, "ObjectDetectionPrimaryMetrics"]] = None, - **kwargs - ): - """ - :keyword limit_settings: Required. [Required] Limit settings for the AutoML job. + sweep_settings: Optional["_models.ImageSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + validation_data_size: Optional[float] = None, + model_settings: Optional["_models.ImageModelSettingsObjectDetection"] = None, + search_space: Optional[List["_models.ImageModelDistributionSettingsObjectDetection"]] = None, + primary_metric: Optional[Union[str, "_models.ObjectDetectionPrimaryMetrics"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". 
+ :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput + :keyword limit_settings: [Required] Limit settings for the AutoML job. Required. :paramtype limit_settings: ~azure.mgmt.machinelearningservices.models.ImageLimitSettings :keyword sweep_settings: Model sweeping and hyperparameter sweeping related settings. :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.ImageSweepSettings @@ -18070,71 +17951,73 @@ def __init__( hyperparameters. :paramtype search_space: list[~azure.mgmt.machinelearningservices.models.ImageModelDistributionSettingsObjectDetection] - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword primary_metric: Primary metric to optimize for this task. Possible values include: - "MeanAveragePrecision". + :keyword primary_metric: Primary metric to optimize for this task. "MeanAveragePrecision" :paramtype primary_metric: str or ~azure.mgmt.machinelearningservices.models.ObjectDetectionPrimaryMetrics """ - super(ImageObjectDetection, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, limit_settings=limit_settings, sweep_settings=sweep_settings, validation_data=validation_data, validation_data_size=validation_data_size, model_settings=model_settings, search_space=search_space, **kwargs) + super().__init__( + limit_settings=limit_settings, + sweep_settings=sweep_settings, + validation_data=validation_data, + validation_data_size=validation_data_size, + model_settings=model_settings, + search_space=search_space, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type: str = "ImageObjectDetection" + self.training_data = training_data + self.primary_metric = primary_metric self.limit_settings = limit_settings self.sweep_settings = sweep_settings self.validation_data = validation_data self.validation_data_size = validation_data_size self.model_settings = model_settings self.search_space = search_space - self.task_type = 'ImageObjectDetection' # type: str - self.primary_metric = primary_metric - self.log_verbosity = log_verbosity - self.target_column_name = target_column_name - self.training_data = training_data -class ImageSweepSettings(msrest.serialization.Model): +class ImageSweepSettings(_serialization.Model): """Model sweeping and hyperparameter sweeping related settings. All required parameters must be populated in order to send to Azure. :ivar early_termination: Type of early termination policy. 
:vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :ivar sampling_algorithm: Required. [Required] Type of the hyperparameter sampling algorithms. - Possible values include: "Grid", "Random", "Bayesian". + :ivar sampling_algorithm: [Required] Type of the hyperparameter sampling algorithms. Required. + Known values are: "Grid", "Random", and "Bayesian". :vartype sampling_algorithm: str or ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ _validation = { - 'sampling_algorithm': {'required': True}, + "sampling_algorithm": {"required": True}, } _attribute_map = { - 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'}, - 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'}, + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "str"}, } def __init__( self, *, - sampling_algorithm: Union[str, "SamplingAlgorithmType"], - early_termination: Optional["EarlyTerminationPolicy"] = None, - **kwargs - ): + sampling_algorithm: Union[str, "_models.SamplingAlgorithmType"], + early_termination: Optional["_models.EarlyTerminationPolicy"] = None, + **kwargs: Any + ) -> None: """ :keyword early_termination: Type of early termination policy. :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :keyword sampling_algorithm: Required. [Required] Type of the hyperparameter sampling - algorithms. Possible values include: "Grid", "Random", "Bayesian". + :keyword sampling_algorithm: [Required] Type of the hyperparameter sampling algorithms. + Required. Known values are: "Grid", "Random", and "Bayesian". :paramtype sampling_algorithm: str or ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ - super(ImageSweepSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.early_termination = early_termination self.sampling_algorithm = sampling_algorithm @@ -18144,75 +18027,69 @@ class ImportDataAction(ScheduleActionBase): All required parameters must be populated in order to send to Azure. - :ivar action_type: Required. [Required] Specifies the action type of the schedule.Constant - filled by server. Possible values include: "CreateJob", "InvokeBatchEndpoint", "ImportData", - "CreateMonitor". + :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values + are: "CreateJob", "InvokeBatchEndpoint", "ImportData", and "CreateMonitor". :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType - :ivar data_import_definition: Required. [Required] Defines Schedule action definition details. + :ivar data_import_definition: [Required] Defines Schedule action definition details. Required. 
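# A minimal usage sketch for the object-detection settings defined above, assuming these
# generated models are importable from this client package; every literal value below is
# illustrative rather than taken from the patch.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

model_settings = _models.ImageModelSettingsObjectDetection(
    learning_rate=0.005,            # must be a float in the range [0, 1]
    model_size="Large",             # known values: "None", "Small", "Medium", "Large", "ExtraLarge"
    nms_iou_threshold=0.5,          # must be a float in the range [0, 1]
    validation_metric_type="Coco",  # known values: "None", "Coco", "Voc", "CocoVoc"
)
sweep_settings = _models.ImageSweepSettings(
    sampling_algorithm="Random",    # required; known values: "Grid", "Random", "Bayesian"
)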
:vartype data_import_definition: ~azure.mgmt.machinelearningservices.models.DataImport """ _validation = { - 'action_type': {'required': True}, - 'data_import_definition': {'required': True}, + "action_type": {"required": True}, + "data_import_definition": {"required": True}, } _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, - 'data_import_definition': {'key': 'dataImportDefinition', 'type': 'DataImport'}, + "action_type": {"key": "actionType", "type": "str"}, + "data_import_definition": {"key": "dataImportDefinition", "type": "DataImport"}, } - def __init__( - self, - *, - data_import_definition: "DataImport", - **kwargs - ): + def __init__(self, *, data_import_definition: "_models.DataImport", **kwargs: Any) -> None: """ - :keyword data_import_definition: Required. [Required] Defines Schedule action definition - details. + :keyword data_import_definition: [Required] Defines Schedule action definition details. + Required. :paramtype data_import_definition: ~azure.mgmt.machinelearningservices.models.DataImport """ - super(ImportDataAction, self).__init__(**kwargs) - self.action_type = 'ImportData' # type: str + super().__init__(**kwargs) + self.action_type: str = "ImportData" self.data_import_definition = data_import_definition -class IndexColumn(msrest.serialization.Model): +class IndexColumn(_serialization.Model): """Dto object representing index column. :ivar column_name: Specifies the column name. :vartype column_name: str - :ivar data_type: Specifies the data type. Possible values include: "String", "Integer", "Long", - "Float", "Double", "Binary", "Datetime", "Boolean". + :ivar data_type: Specifies the data type. Known values are: "String", "Integer", "Long", + "Float", "Double", "Binary", "Datetime", and "Boolean". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.FeatureDataType """ _attribute_map = { - 'column_name': {'key': 'columnName', 'type': 'str'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, + "column_name": {"key": "columnName", "type": "str"}, + "data_type": {"key": "dataType", "type": "str"}, } def __init__( self, *, column_name: Optional[str] = None, - data_type: Optional[Union[str, "FeatureDataType"]] = None, - **kwargs - ): + data_type: Optional[Union[str, "_models.FeatureDataType"]] = None, + **kwargs: Any + ) -> None: """ :keyword column_name: Specifies the column name. :paramtype column_name: str - :keyword data_type: Specifies the data type. Possible values include: "String", "Integer", - "Long", "Float", "Double", "Binary", "Datetime", "Boolean". + :keyword data_type: Specifies the data type. Known values are: "String", "Integer", "Long", + "Float", "Double", "Binary", "Datetime", and "Boolean". :paramtype data_type: str or ~azure.mgmt.machinelearningservices.models.FeatureDataType """ - super(IndexColumn, self).__init__(**kwargs) + super().__init__(**kwargs) self.column_name = column_name self.data_type = data_type -class InferenceContainerProperties(msrest.serialization.Model): +class InferenceContainerProperties(_serialization.Model): """InferenceContainerProperties. :ivar liveness_route: The route to check the liveness of the inference server container. 
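# A minimal sketch of an IndexColumn, assuming the same generated-models import as above;
# the column name is a placeholder.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

timestamp_index = _models.IndexColumn(
    column_name="timestamp",  # placeholder name
    data_type="Datetime",     # known values include "String", "Integer", "Long", "Float",
                              # "Double", "Binary", "Datetime", "Boolean"
)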
@@ -18225,19 +18102,19 @@ class InferenceContainerProperties(msrest.serialization.Model): """ _attribute_map = { - 'liveness_route': {'key': 'livenessRoute', 'type': 'Route'}, - 'readiness_route': {'key': 'readinessRoute', 'type': 'Route'}, - 'scoring_route': {'key': 'scoringRoute', 'type': 'Route'}, + "liveness_route": {"key": "livenessRoute", "type": "Route"}, + "readiness_route": {"key": "readinessRoute", "type": "Route"}, + "scoring_route": {"key": "scoringRoute", "type": "Route"}, } def __init__( self, *, - liveness_route: Optional["Route"] = None, - readiness_route: Optional["Route"] = None, - scoring_route: Optional["Route"] = None, - **kwargs - ): + liveness_route: Optional["_models.Route"] = None, + readiness_route: Optional["_models.Route"] = None, + scoring_route: Optional["_models.Route"] = None, + **kwargs: Any + ) -> None: """ :keyword liveness_route: The route to check the liveness of the inference server container. :paramtype liveness_route: ~azure.mgmt.machinelearningservices.models.Route @@ -18247,13 +18124,13 @@ def __init__( container. :paramtype scoring_route: ~azure.mgmt.machinelearningservices.models.Route """ - super(InferenceContainerProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.liveness_route = liveness_route self.readiness_route = readiness_route self.scoring_route = scoring_route -class InstanceTypeSchema(msrest.serialization.Model): +class InstanceTypeSchema(_serialization.Model): """Instance type schema. :ivar node_selector: Node Selector. @@ -18263,29 +18140,29 @@ class InstanceTypeSchema(msrest.serialization.Model): """ _attribute_map = { - 'node_selector': {'key': 'nodeSelector', 'type': '{str}'}, - 'resources': {'key': 'resources', 'type': 'InstanceTypeSchemaResources'}, + "node_selector": {"key": "nodeSelector", "type": "{str}"}, + "resources": {"key": "resources", "type": "InstanceTypeSchemaResources"}, } def __init__( self, *, node_selector: Optional[Dict[str, str]] = None, - resources: Optional["InstanceTypeSchemaResources"] = None, - **kwargs - ): + resources: Optional["_models.InstanceTypeSchemaResources"] = None, + **kwargs: Any + ) -> None: """ :keyword node_selector: Node Selector. :paramtype node_selector: dict[str, str] :keyword resources: Resource requests/limits for this instance type. :paramtype resources: ~azure.mgmt.machinelearningservices.models.InstanceTypeSchemaResources """ - super(InstanceTypeSchema, self).__init__(**kwargs) + super().__init__(**kwargs) self.node_selector = node_selector self.resources = resources -class InstanceTypeSchemaResources(msrest.serialization.Model): +class InstanceTypeSchemaResources(_serialization.Model): """Resource requests/limits for this instance type. :ivar requests: Resource requests for this instance type. @@ -18295,66 +18172,58 @@ class InstanceTypeSchemaResources(msrest.serialization.Model): """ _attribute_map = { - 'requests': {'key': 'requests', 'type': '{str}'}, - 'limits': {'key': 'limits', 'type': '{str}'}, + "requests": {"key": "requests", "type": "{str}"}, + "limits": {"key": "limits", "type": "{str}"}, } def __init__( - self, - *, - requests: Optional[Dict[str, str]] = None, - limits: Optional[Dict[str, str]] = None, - **kwargs - ): + self, *, requests: Optional[Dict[str, str]] = None, limits: Optional[Dict[str, str]] = None, **kwargs: Any + ) -> None: """ :keyword requests: Resource requests for this instance type. :paramtype requests: dict[str, str] :keyword limits: Resource limits for this instance type. 
:paramtype limits: dict[str, str] """ - super(InstanceTypeSchemaResources, self).__init__(**kwargs) + super().__init__(**kwargs) self.requests = requests self.limits = limits -class IntellectualProperty(msrest.serialization.Model): +class IntellectualProperty(_serialization.Model): """Intellectual Property details for a resource. All required parameters must be populated in order to send to Azure. - :ivar protection_level: Protection level of the Intellectual Property. Possible values include: - "All", "None". + :ivar protection_level: Protection level of the Intellectual Property. Known values are: "All" + and "None". :vartype protection_level: str or ~azure.mgmt.machinelearningservices.models.ProtectionLevel - :ivar publisher: Required. [Required] Publisher of the Intellectual Property. Must be the same - as Registry publisher name. + :ivar publisher: [Required] Publisher of the Intellectual Property. Must be the same as + Registry publisher name. Required. :vartype publisher: str """ _validation = { - 'publisher': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "publisher": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'protection_level': {'key': 'protectionLevel', 'type': 'str'}, - 'publisher': {'key': 'publisher', 'type': 'str'}, + "protection_level": {"key": "protectionLevel", "type": "str"}, + "publisher": {"key": "publisher", "type": "str"}, } def __init__( - self, - *, - publisher: str, - protection_level: Optional[Union[str, "ProtectionLevel"]] = None, - **kwargs - ): + self, *, publisher: str, protection_level: Optional[Union[str, "_models.ProtectionLevel"]] = None, **kwargs: Any + ) -> None: """ - :keyword protection_level: Protection level of the Intellectual Property. Possible values - include: "All", "None". + :keyword protection_level: Protection level of the Intellectual Property. Known values are: + "All" and "None". :paramtype protection_level: str or ~azure.mgmt.machinelearningservices.models.ProtectionLevel - :keyword publisher: Required. [Required] Publisher of the Intellectual Property. Must be the - same as Registry publisher name. + :keyword publisher: [Required] Publisher of the Intellectual Property. Must be the same as + Registry publisher name. Required. :paramtype publisher: str """ - super(IntellectualProperty, self).__init__(**kwargs) + super().__init__(**kwargs) self.protection_level = protection_level self.publisher = publisher @@ -18377,41 +18246,36 @@ class JobBase(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. 
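# A minimal sketch of an InstanceTypeSchema with resource requests/limits, assuming the
# same generated-models import; the node-selector label and quantities are placeholders
# written in Kubernetes-style notation.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

gpu_instance_type = _models.InstanceTypeSchema(
    node_selector={"agentpool": "gpu"},  # placeholder node-selector label
    resources=_models.InstanceTypeSchemaResources(
        requests={"cpu": "2", "memory": "8Gi", "nvidia.com/gpu": "1"},
        limits={"cpu": "4", "memory": "16Gi", "nvidia.com/gpu": "1"},
    ),
)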
:vartype properties: ~azure.mgmt.machinelearningservices.models.JobBaseProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'JobBaseProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "JobBaseProperties"}, } - def __init__( - self, - *, - properties: "JobBaseProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.JobBaseProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.JobBaseProperties """ - super(JobBase, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class JobBaseResourceArmPaginatedResult(msrest.serialization.Model): +class JobBaseResourceArmPaginatedResult(_serialization.Model): """A paginated list of JobBase entities. :ivar next_link: The link to the next page of JobBase objects. If null, there are no additional @@ -18422,17 +18286,13 @@ class JobBaseResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[JobBase]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[JobBase]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["JobBase"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.JobBase"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of JobBase objects. If null, there are no additional pages. @@ -18440,7 +18300,7 @@ def __init__( :keyword value: An array of objects of type JobBase. :paramtype value: list[~azure.mgmt.machinelearningservices.models.JobBase] """ - super(JobBaseResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value @@ -18459,7 +18319,7 @@ class JobResourceConfiguration(ResourceConfiguration): For use with elastic training, currently supported by PyTorch distribution type only. :vartype max_instance_count: int :ivar properties: Additional properties bag. - :vartype properties: dict[str, any] + :vartype properties: dict[str, JSON] :ivar docker_args: Extra arguments to pass to the Docker run command. This would override any parameters that have already been set by the system, or in this section. This parameter is only supported for Azure ML compute types. 
@@ -18471,31 +18331,31 @@ class JobResourceConfiguration(ResourceConfiguration): """ _validation = { - 'shm_size': {'pattern': r'\d+[bBkKmMgG]'}, + "shm_size": {"pattern": r"\d+[bBkKmMgG]"}, } _attribute_map = { - 'instance_count': {'key': 'instanceCount', 'type': 'int'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'locations': {'key': 'locations', 'type': '[str]'}, - 'max_instance_count': {'key': 'maxInstanceCount', 'type': 'int'}, - 'properties': {'key': 'properties', 'type': '{object}'}, - 'docker_args': {'key': 'dockerArgs', 'type': 'str'}, - 'shm_size': {'key': 'shmSize', 'type': 'str'}, + "instance_count": {"key": "instanceCount", "type": "int"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "locations": {"key": "locations", "type": "[str]"}, + "max_instance_count": {"key": "maxInstanceCount", "type": "int"}, + "properties": {"key": "properties", "type": "{object}"}, + "docker_args": {"key": "dockerArgs", "type": "str"}, + "shm_size": {"key": "shmSize", "type": "str"}, } def __init__( self, *, - instance_count: Optional[int] = 1, + instance_count: int = 1, instance_type: Optional[str] = None, locations: Optional[List[str]] = None, max_instance_count: Optional[int] = None, - properties: Optional[Dict[str, Any]] = None, + properties: Optional[Dict[str, JSON]] = None, docker_args: Optional[str] = None, - shm_size: Optional[str] = "2g", - **kwargs - ): + shm_size: str = "2g", + **kwargs: Any + ) -> None: """ :keyword instance_count: Optional number of instances or nodes used by the compute target. :paramtype instance_count: int @@ -18508,7 +18368,7 @@ def __init__( For use with elastic training, currently supported by PyTorch distribution type only. :paramtype max_instance_count: int :keyword properties: Additional properties bag. - :paramtype properties: dict[str, any] + :paramtype properties: dict[str, JSON] :keyword docker_args: Extra arguments to pass to the Docker run command. This would override any parameters that have already been set by the system, or in this section. This parameter is only supported for Azure ML compute types. @@ -18518,7 +18378,14 @@ def __init__( b(bytes), k(kilobytes), m(megabytes), or g(gigabytes). :paramtype shm_size: str """ - super(JobResourceConfiguration, self).__init__(instance_count=instance_count, instance_type=instance_type, locations=locations, max_instance_count=max_instance_count, properties=properties, **kwargs) + super().__init__( + instance_count=instance_count, + instance_type=instance_type, + locations=locations, + max_instance_count=max_instance_count, + properties=properties, + **kwargs + ) self.docker_args = docker_args self.shm_size = shm_size @@ -18528,40 +18395,34 @@ class JobScheduleAction(ScheduleActionBase): All required parameters must be populated in order to send to Azure. - :ivar action_type: Required. [Required] Specifies the action type of the schedule.Constant - filled by server. Possible values include: "CreateJob", "InvokeBatchEndpoint", "ImportData", - "CreateMonitor". + :ivar action_type: [Required] Specifies the action type of the schedule. Required. Known values + are: "CreateJob", "InvokeBatchEndpoint", "ImportData", and "CreateMonitor". :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleActionType - :ivar job_definition: Required. [Required] Defines Schedule action definition details. + :ivar job_definition: [Required] Defines Schedule action definition details. Required. 
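# A minimal sketch of a JobResourceConfiguration, assuming the same generated-models
# import; the VM size is a placeholder.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

job_resources = _models.JobResourceConfiguration(
    instance_count=4,
    instance_type="Standard_NC6s_v3",  # placeholder compute SKU
    shm_size="16g",                    # must match the pattern \d+[bBkKmMgG]
    docker_args="--ipc=host",          # only supported for Azure ML compute types
)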
:vartype job_definition: ~azure.mgmt.machinelearningservices.models.JobBaseProperties """ _validation = { - 'action_type': {'required': True}, - 'job_definition': {'required': True}, + "action_type": {"required": True}, + "job_definition": {"required": True}, } _attribute_map = { - 'action_type': {'key': 'actionType', 'type': 'str'}, - 'job_definition': {'key': 'jobDefinition', 'type': 'JobBaseProperties'}, + "action_type": {"key": "actionType", "type": "str"}, + "job_definition": {"key": "jobDefinition", "type": "JobBaseProperties"}, } - def __init__( - self, - *, - job_definition: "JobBaseProperties", - **kwargs - ): + def __init__(self, *, job_definition: "_models.JobBaseProperties", **kwargs: Any) -> None: """ - :keyword job_definition: Required. [Required] Defines Schedule action definition details. + :keyword job_definition: [Required] Defines Schedule action definition details. Required. :paramtype job_definition: ~azure.mgmt.machinelearningservices.models.JobBaseProperties """ - super(JobScheduleAction, self).__init__(**kwargs) - self.action_type = 'CreateJob' # type: str + super().__init__(**kwargs) + self.action_type: str = "CreateJob" self.job_definition = job_definition -class JobService(msrest.serialization.Model): +class JobService(_serialization.Model): """Job endpoint definition. Variables are only populated by the server, and will be ignored when sending a request. @@ -18584,18 +18445,18 @@ class JobService(msrest.serialization.Model): """ _validation = { - 'error_message': {'readonly': True}, - 'status': {'readonly': True}, + "error_message": {"readonly": True}, + "status": {"readonly": True}, } _attribute_map = { - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'job_service_type': {'key': 'jobServiceType', 'type': 'str'}, - 'nodes': {'key': 'nodes', 'type': 'Nodes'}, - 'port': {'key': 'port', 'type': 'int'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'status': {'key': 'status', 'type': 'str'}, + "endpoint": {"key": "endpoint", "type": "str"}, + "error_message": {"key": "errorMessage", "type": "str"}, + "job_service_type": {"key": "jobServiceType", "type": "str"}, + "nodes": {"key": "nodes", "type": "Nodes"}, + "port": {"key": "port", "type": "int"}, + "properties": {"key": "properties", "type": "{str}"}, + "status": {"key": "status", "type": "str"}, } def __init__( @@ -18603,11 +18464,11 @@ def __init__( *, endpoint: Optional[str] = None, job_service_type: Optional[str] = None, - nodes: Optional["Nodes"] = None, + nodes: Optional["_models.Nodes"] = None, port: Optional[int] = None, properties: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword endpoint: Url for endpoint. :paramtype endpoint: str @@ -18621,7 +18482,7 @@ def __init__( :keyword properties: Additional properties to set on the endpoint. :paramtype properties: dict[str, str] """ - super(JobService, self).__init__(**kwargs) + super().__init__(**kwargs) self.endpoint = endpoint self.error_message = None self.job_service_type = job_service_type @@ -18631,89 +18492,84 @@ def __init__( self.status = None -class KerberosCredentials(msrest.serialization.Model): +class KerberosCredentials(_serialization.Model): """KerberosCredentials. All required parameters must be populated in order to send to Azure. - :ivar kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. + :ivar kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. 
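# A minimal sketch of a JobService endpoint definition, assuming the same generated-models
# import; the service type string and the properties entry are assumptions for illustration.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

interactive_service = _models.JobService(
    job_service_type="JupyterLab",        # assumed endpoint type string
    port=8888,
    properties={"note": "illustrative"},  # arbitrary additional properties
)
# error_message and status are read-only and are populated by the service.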
:vartype kerberos_kdc_address: str - :ivar kerberos_principal: Required. [Required] Kerberos Username. + :ivar kerberos_principal: [Required] Kerberos Username. Required. :vartype kerberos_principal: str - :ivar kerberos_realm: Required. [Required] Domain over which a Kerberos authentication server - has the authority to authenticate a user, host or service. + :ivar kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. :vartype kerberos_realm: str """ _validation = { - 'kerberos_kdc_address': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_principal': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_realm': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "kerberos_kdc_address": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_principal": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_realm": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'kerberos_kdc_address': {'key': 'kerberosKdcAddress', 'type': 'str'}, - 'kerberos_principal': {'key': 'kerberosPrincipal', 'type': 'str'}, - 'kerberos_realm': {'key': 'kerberosRealm', 'type': 'str'}, + "kerberos_kdc_address": {"key": "kerberosKdcAddress", "type": "str"}, + "kerberos_principal": {"key": "kerberosPrincipal", "type": "str"}, + "kerberos_realm": {"key": "kerberosRealm", "type": "str"}, } def __init__( - self, - *, - kerberos_kdc_address: str, - kerberos_principal: str, - kerberos_realm: str, - **kwargs - ): + self, *, kerberos_kdc_address: str, kerberos_principal: str, kerberos_realm: str, **kwargs: Any + ) -> None: """ - :keyword kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. + :keyword kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. :paramtype kerberos_kdc_address: str - :keyword kerberos_principal: Required. [Required] Kerberos Username. + :keyword kerberos_principal: [Required] Kerberos Username. Required. :paramtype kerberos_principal: str - :keyword kerberos_realm: Required. [Required] Domain over which a Kerberos authentication - server has the authority to authenticate a user, host or service. + :keyword kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. :paramtype kerberos_realm: str """ - super(KerberosCredentials, self).__init__(**kwargs) + super().__init__(**kwargs) self.kerberos_kdc_address = kerberos_kdc_address self.kerberos_principal = kerberos_principal self.kerberos_realm = kerberos_realm -class KerberosKeytabCredentials(DatastoreCredentials, KerberosCredentials): +class KerberosKeytabCredentials(KerberosCredentials, DatastoreCredentials): """KerberosKeytabCredentials. All required parameters must be populated in order to send to Azure. - :ivar kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. :vartype kerberos_kdc_address: str - :ivar kerberos_principal: Required. [Required] Kerberos Username. 
+ :ivar kerberos_principal: [Required] Kerberos Username. Required. :vartype kerberos_principal: str - :ivar kerberos_realm: Required. [Required] Domain over which a Kerberos authentication server - has the authority to authenticate a user, host or service. + :ivar kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. :vartype kerberos_realm: str - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". - :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType - :ivar secrets: Required. [Required] Keytab secrets. + :ivar secrets: [Required] Keytab secrets. Required. :vartype secrets: ~azure.mgmt.machinelearningservices.models.KerberosKeytabSecrets """ _validation = { - 'kerberos_kdc_address': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_principal': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_realm': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'credentials_type': {'required': True}, - 'secrets': {'required': True}, + "credentials_type": {"required": True}, + "kerberos_kdc_address": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_principal": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_realm": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "secrets": {"required": True}, } _attribute_map = { - 'kerberos_kdc_address': {'key': 'kerberosKdcAddress', 'type': 'str'}, - 'kerberos_principal': {'key': 'kerberosPrincipal', 'type': 'str'}, - 'kerberos_realm': {'key': 'kerberosRealm', 'type': 'str'}, - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'KerberosKeytabSecrets'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, + "kerberos_kdc_address": {"key": "kerberosKdcAddress", "type": "str"}, + "kerberos_principal": {"key": "kerberosPrincipal", "type": "str"}, + "kerberos_realm": {"key": "kerberosRealm", "type": "str"}, + "secrets": {"key": "secrets", "type": "KerberosKeytabSecrets"}, } def __init__( @@ -18722,26 +18578,31 @@ def __init__( kerberos_kdc_address: str, kerberos_principal: str, kerberos_realm: str, - secrets: "KerberosKeytabSecrets", - **kwargs - ): + secrets: "_models.KerberosKeytabSecrets", + **kwargs: Any + ) -> None: """ - :keyword kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. + :keyword kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. :paramtype kerberos_kdc_address: str - :keyword kerberos_principal: Required. [Required] Kerberos Username. + :keyword kerberos_principal: [Required] Kerberos Username. Required. :paramtype kerberos_principal: str - :keyword kerberos_realm: Required. [Required] Domain over which a Kerberos authentication - server has the authority to authenticate a user, host or service. + :keyword kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. :paramtype kerberos_realm: str - :keyword secrets: Required. [Required] Keytab secrets. + :keyword secrets: [Required] Keytab secrets. Required. 
:paramtype secrets: ~azure.mgmt.machinelearningservices.models.KerberosKeytabSecrets """ - super(KerberosKeytabCredentials, self).__init__(kerberos_kdc_address=kerberos_kdc_address, kerberos_principal=kerberos_principal, kerberos_realm=kerberos_realm, **kwargs) + super().__init__( + kerberos_kdc_address=kerberos_kdc_address, + kerberos_principal=kerberos_principal, + kerberos_realm=kerberos_realm, + **kwargs + ) + self.credentials_type: str = "KerberosKeytab" + self.secrets = secrets self.kerberos_kdc_address = kerberos_kdc_address self.kerberos_principal = kerberos_principal self.kerberos_realm = kerberos_realm - self.credentials_type = 'KerberosKeytab' # type: str - self.secrets = secrets class KerberosKeytabSecrets(DatastoreSecrets): @@ -18749,72 +18610,67 @@ class KerberosKeytabSecrets(DatastoreSecrets): All required parameters must be populated in order to send to Azure. - :ivar secrets_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", "Sas", - "ServicePrincipal", "KerberosPassword", "KerberosKeytab". + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType :ivar kerberos_keytab: Kerberos keytab secret. :vartype kerberos_keytab: str """ _validation = { - 'secrets_type': {'required': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'kerberos_keytab': {'key': 'kerberosKeytab', 'type': 'str'}, + "secrets_type": {"key": "secretsType", "type": "str"}, + "kerberos_keytab": {"key": "kerberosKeytab", "type": "str"}, } - def __init__( - self, - *, - kerberos_keytab: Optional[str] = None, - **kwargs - ): + def __init__(self, *, kerberos_keytab: Optional[str] = None, **kwargs: Any) -> None: """ :keyword kerberos_keytab: Kerberos keytab secret. :paramtype kerberos_keytab: str """ - super(KerberosKeytabSecrets, self).__init__(**kwargs) - self.secrets_type = 'KerberosKeytab' # type: str + super().__init__(**kwargs) + self.secrets_type: str = "KerberosKeytab" self.kerberos_keytab = kerberos_keytab -class KerberosPasswordCredentials(DatastoreCredentials, KerberosCredentials): +class KerberosPasswordCredentials(KerberosCredentials, DatastoreCredentials): """KerberosPasswordCredentials. All required parameters must be populated in order to send to Azure. - :ivar kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". + :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType + :ivar kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. :vartype kerberos_kdc_address: str - :ivar kerberos_principal: Required. [Required] Kerberos Username. + :ivar kerberos_principal: [Required] Kerberos Username. Required. :vartype kerberos_principal: str - :ivar kerberos_realm: Required. [Required] Domain over which a Kerberos authentication server - has the authority to authenticate a user, host or service. 
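# A minimal sketch of KerberosKeytabCredentials with its keytab secret, assuming the same
# generated-models import; the KDC host, principal, realm, and keytab payload are placeholders.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

keytab_credentials = _models.KerberosKeytabCredentials(
    kerberos_kdc_address="kdc.contoso.com",
    kerberos_principal="svc-ml",
    kerberos_realm="CONTOSO.COM",
    secrets=_models.KerberosKeytabSecrets(kerberos_keytab="<base64-encoded-keytab>"),
)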
+ :ivar kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. :vartype kerberos_realm: str - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". - :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType - :ivar secrets: Required. [Required] Kerberos password secrets. + :ivar secrets: [Required] Kerberos password secrets. Required. :vartype secrets: ~azure.mgmt.machinelearningservices.models.KerberosPasswordSecrets """ _validation = { - 'kerberos_kdc_address': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_principal': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'kerberos_realm': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'credentials_type': {'required': True}, - 'secrets': {'required': True}, + "credentials_type": {"required": True}, + "kerberos_kdc_address": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_principal": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "kerberos_realm": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "secrets": {"required": True}, } _attribute_map = { - 'kerberos_kdc_address': {'key': 'kerberosKdcAddress', 'type': 'str'}, - 'kerberos_principal': {'key': 'kerberosPrincipal', 'type': 'str'}, - 'kerberos_realm': {'key': 'kerberosRealm', 'type': 'str'}, - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'KerberosPasswordSecrets'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, + "kerberos_kdc_address": {"key": "kerberosKdcAddress", "type": "str"}, + "kerberos_principal": {"key": "kerberosPrincipal", "type": "str"}, + "kerberos_realm": {"key": "kerberosRealm", "type": "str"}, + "secrets": {"key": "secrets", "type": "KerberosPasswordSecrets"}, } def __init__( @@ -18823,26 +18679,31 @@ def __init__( kerberos_kdc_address: str, kerberos_principal: str, kerberos_realm: str, - secrets: "KerberosPasswordSecrets", - **kwargs - ): + secrets: "_models.KerberosPasswordSecrets", + **kwargs: Any + ) -> None: """ - :keyword kerberos_kdc_address: Required. [Required] IP Address or DNS HostName. + :keyword kerberos_kdc_address: [Required] IP Address or DNS HostName. Required. :paramtype kerberos_kdc_address: str - :keyword kerberos_principal: Required. [Required] Kerberos Username. + :keyword kerberos_principal: [Required] Kerberos Username. Required. :paramtype kerberos_principal: str - :keyword kerberos_realm: Required. [Required] Domain over which a Kerberos authentication - server has the authority to authenticate a user, host or service. + :keyword kerberos_realm: [Required] Domain over which a Kerberos authentication server has the + authority to authenticate a user, host or service. Required. :paramtype kerberos_realm: str - :keyword secrets: Required. [Required] Kerberos password secrets. + :keyword secrets: [Required] Kerberos password secrets. Required. 
:paramtype secrets: ~azure.mgmt.machinelearningservices.models.KerberosPasswordSecrets """ - super(KerberosPasswordCredentials, self).__init__(kerberos_kdc_address=kerberos_kdc_address, kerberos_principal=kerberos_principal, kerberos_realm=kerberos_realm, **kwargs) + super().__init__( + kerberos_kdc_address=kerberos_kdc_address, + kerberos_principal=kerberos_principal, + kerberos_realm=kerberos_realm, + **kwargs + ) + self.credentials_type: str = "KerberosPassword" + self.secrets = secrets self.kerberos_kdc_address = kerberos_kdc_address self.kerberos_principal = kerberos_principal self.kerberos_realm = kerberos_realm - self.credentials_type = 'KerberosPassword' # type: str - self.secrets = secrets class KerberosPasswordSecrets(DatastoreSecrets): @@ -18850,39 +18711,34 @@ class KerberosPasswordSecrets(DatastoreSecrets): All required parameters must be populated in order to send to Azure. - :ivar secrets_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", "Sas", - "ServicePrincipal", "KerberosPassword", "KerberosKeytab". + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType :ivar kerberos_password: Kerberos password secret. :vartype kerberos_password: str """ _validation = { - 'secrets_type': {'required': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'kerberos_password': {'key': 'kerberosPassword', 'type': 'str'}, + "secrets_type": {"key": "secretsType", "type": "str"}, + "kerberos_password": {"key": "kerberosPassword", "type": "str"}, } - def __init__( - self, - *, - kerberos_password: Optional[str] = None, - **kwargs - ): + def __init__(self, *, kerberos_password: Optional[str] = None, **kwargs: Any) -> None: """ :keyword kerberos_password: Kerberos password secret. :paramtype kerberos_password: str """ - super(KerberosPasswordSecrets, self).__init__(**kwargs) - self.secrets_type = 'KerberosPassword' # type: str + super().__init__(**kwargs) + self.secrets_type: str = "KerberosPassword" self.kerberos_password = kerberos_password -class KeyVaultProperties(msrest.serialization.Model): +class KeyVaultProperties(_serialization.Model): """Customer Key vault properties. All required parameters must be populated in order to send to Azure. @@ -18890,47 +18746,42 @@ class KeyVaultProperties(msrest.serialization.Model): :ivar identity_client_id: Currently, we support only SystemAssigned MSI. We need this when we support UserAssignedIdentities. :vartype identity_client_id: str - :ivar key_identifier: Required. KeyVault key identifier to encrypt the data. + :ivar key_identifier: KeyVault key identifier to encrypt the data. Required. :vartype key_identifier: str - :ivar key_vault_arm_id: Required. KeyVault Arm Id that contains the data encryption key. + :ivar key_vault_arm_id: KeyVault Arm Id that contains the data encryption key. Required. 
:vartype key_vault_arm_id: str """ _validation = { - 'key_identifier': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'key_vault_arm_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "key_identifier": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "key_vault_arm_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'identity_client_id': {'key': 'identityClientId', 'type': 'str'}, - 'key_identifier': {'key': 'keyIdentifier', 'type': 'str'}, - 'key_vault_arm_id': {'key': 'keyVaultArmId', 'type': 'str'}, + "identity_client_id": {"key": "identityClientId", "type": "str"}, + "key_identifier": {"key": "keyIdentifier", "type": "str"}, + "key_vault_arm_id": {"key": "keyVaultArmId", "type": "str"}, } def __init__( - self, - *, - key_identifier: str, - key_vault_arm_id: str, - identity_client_id: Optional[str] = None, - **kwargs - ): + self, *, key_identifier: str, key_vault_arm_id: str, identity_client_id: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword identity_client_id: Currently, we support only SystemAssigned MSI. We need this when we support UserAssignedIdentities. :paramtype identity_client_id: str - :keyword key_identifier: Required. KeyVault key identifier to encrypt the data. + :keyword key_identifier: KeyVault key identifier to encrypt the data. Required. :paramtype key_identifier: str - :keyword key_vault_arm_id: Required. KeyVault Arm Id that contains the data encryption key. + :keyword key_vault_arm_id: KeyVault Arm Id that contains the data encryption key. Required. :paramtype key_vault_arm_id: str """ - super(KeyVaultProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.identity_client_id = identity_client_id self.key_identifier = key_identifier self.key_vault_arm_id = key_vault_arm_id -class KubernetesSchema(msrest.serialization.Model): +class KubernetesSchema(_serialization.Model): """Kubernetes Compute Schema. :ivar properties: Properties of Kubernetes. @@ -18938,24 +18789,19 @@ class KubernetesSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'KubernetesProperties'}, + "properties": {"key": "properties", "type": "KubernetesProperties"}, } - def __init__( - self, - *, - properties: Optional["KubernetesProperties"] = None, - **kwargs - ): + def __init__(self, *, properties: Optional["_models.KubernetesProperties"] = None, **kwargs: Any) -> None: """ :keyword properties: Properties of Kubernetes. :paramtype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties """ - super(KubernetesSchema, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class Kubernetes(Compute, KubernetesSchema): +class Kubernetes(Compute, KubernetesSchema): # pylint: disable=too-many-instance-attributes """A Machine Learning compute based on Kubernetes Compute. Variables are only populated by the server, and will be ignored when sending a request. @@ -18964,15 +18810,15 @@ class Kubernetes(Compute, KubernetesSchema): :ivar properties: Properties of Kubernetes. :vartype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. 
Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType :ivar compute_location: Location for the underlying compute. :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ProvisioningState :ivar description: The description of the Machine Learning compute. @@ -18994,38 +18840,38 @@ class Kubernetes(Compute, KubernetesSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'KubernetesProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "KubernetesProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } def __init__( self, *, - properties: Optional["KubernetesProperties"] = None, + properties: Optional["_models.KubernetesProperties"] = None, compute_location: Optional[str] = None, description: Optional[str] = None, resource_id: Optional[str] = None, disable_local_auth: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword properties: Properties of Kubernetes. :paramtype properties: ~azure.mgmt.machinelearningservices.models.KubernetesProperties @@ -19039,9 +18885,16 @@ def __init__( MSI and AAD exclusively for authentication. 
:paramtype disable_local_auth: bool """ - super(Kubernetes, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) + super().__init__( + compute_location=compute_location, + description=description, + resource_id=resource_id, + disable_local_auth=disable_local_auth, + properties=properties, + **kwargs + ) self.properties = properties - self.compute_type = 'Kubernetes' # type: str + self.compute_type: str = "Kubernetes" self.compute_location = compute_location self.provisioning_state = None self.description = description @@ -19053,11 +18906,11 @@ def __init__( self.disable_local_auth = disable_local_auth -class OnlineDeploymentProperties(EndpointDeploymentPropertiesBase): +class OnlineDeploymentProperties(EndpointDeploymentPropertiesBase): # pylint: disable=too-many-instance-attributes """OnlineDeploymentProperties. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: KubernetesOnlineDeployment, ManagedOnlineDeployment. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + KubernetesOnlineDeployment, ManagedOnlineDeployment Variables are only populated by the server, and will be ignored when sending a request. @@ -19079,12 +18932,12 @@ class OnlineDeploymentProperties(EndpointDeploymentPropertiesBase): :ivar data_collector: The mdc configuration, we disable mdc when it's null. :vartype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector :ivar egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Possible values include: "Enabled", + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and "Disabled". :vartype egress_public_network_access: str or ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :ivar endpoint_compute_type: Required. [Required] The compute type of the endpoint.Constant - filled by server. Possible values include: "Managed", "Kubernetes", "AzureMLCompute". + :ivar endpoint_compute_type: [Required] The compute type of the endpoint. Required. Known + values are: "Managed", "Kubernetes", and "AzureMLCompute". :vartype endpoint_compute_type: str or ~azure.mgmt.machinelearningservices.models.EndpointComputeType :ivar instance_type: Compute instance type. @@ -19095,8 +18948,8 @@ class OnlineDeploymentProperties(EndpointDeploymentPropertiesBase): :vartype model: str :ivar model_mount_path: The path to mount the model in custom container. :vartype model_mount_path: str - :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values - include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled". + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. 
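A similar sketch for the customer-managed-key and Kubernetes compute models covered above; the import path, key identifier, and ARM IDs below are placeholders, not values from this spec.

from azure.ai.ml._restclient.v2023_08_01_preview import models as _models  # assumed import path

cmk = _models.KeyVaultProperties(
    key_identifier="https://<vault-name>.vault.azure.net/keys/<key-name>/<version>",  # placeholder key identifier
    key_vault_arm_id="/subscriptions/<sub-id>/resourceGroups/<rg>/providers/Microsoft.KeyVault/vaults/<vault-name>",  # placeholder ARM id
    identity_client_id=None,  # optional; only SystemAssigned MSI is supported per the docstring
)

k8s = _models.Kubernetes(
    description="attached Kubernetes cluster",                      # placeholder
    resource_id="/subscriptions/<sub-id>/<...>/connectedClusters/<cluster>",  # placeholder ARM id
    disable_local_auth=True,                                        # opt out of local auth, MSI/AAD only
)
assert k8s.compute_type == "Kubernetes"                             # discriminator set by the generated constructor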
@@ -19112,54 +18965,54 @@ class OnlineDeploymentProperties(EndpointDeploymentPropertiesBase): """ _validation = { - 'endpoint_compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, + "endpoint_compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, - 'data_collector': {'key': 'dataCollector', 'type': 'DataCollector'}, - 'egress_public_network_access': {'key': 'egressPublicNetworkAccess', 'type': 'str'}, - 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, - 'model': {'key': 'model', 'type': 'str'}, - 'model_mount_path': {'key': 'modelMountPath', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, - 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, + "data_collector": {"key": "dataCollector", "type": "DataCollector"}, + "egress_public_network_access": {"key": "egressPublicNetworkAccess", "type": "str"}, + "endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, + "model": {"key": "model", "type": "str"}, + "model_mount_path": {"key": "modelMountPath", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, + "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, + "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, } _subtype_map = { - 'endpoint_compute_type': {'Kubernetes': 'KubernetesOnlineDeployment', 'Managed': 'ManagedOnlineDeployment'} + "endpoint_compute_type": {"Kubernetes": "KubernetesOnlineDeployment", "Managed": "ManagedOnlineDeployment"} } def __init__( self, *, - code_configuration: Optional["CodeConfiguration"] = None, + code_configuration: Optional["_models.CodeConfiguration"] = None, description: Optional[str] = None, environment_id: Optional[str] = None, environment_variables: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, - app_insights_enabled: Optional[bool] = False, - data_collector: Optional["DataCollector"] = None, - egress_public_network_access: Optional[Union[str, "EgressPublicNetworkAccessType"]] = None, + app_insights_enabled: bool = False, + data_collector: Optional["_models.DataCollector"] = None, + 
egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, instance_type: Optional[str] = None, - liveness_probe: Optional["ProbeSettings"] = None, + liveness_probe: Optional["_models.ProbeSettings"] = None, model: Optional[str] = None, model_mount_path: Optional[str] = None, - readiness_probe: Optional["ProbeSettings"] = None, - request_settings: Optional["OnlineRequestSettings"] = None, - scale_settings: Optional["OnlineScaleSettings"] = None, - **kwargs - ): + readiness_probe: Optional["_models.ProbeSettings"] = None, + request_settings: Optional["_models.OnlineRequestSettings"] = None, + scale_settings: Optional["_models.OnlineScaleSettings"] = None, + **kwargs: Any + ) -> None: """ :keyword code_configuration: Code configuration for the endpoint deployment. :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration @@ -19177,7 +19030,7 @@ def __init__( :keyword data_collector: The mdc configuration, we disable mdc when it's null. :paramtype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector :keyword egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Possible values include: "Enabled", + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and "Disabled". :paramtype egress_public_network_access: str or ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType @@ -19200,11 +19053,18 @@ def __init__( and to DefaultScaleSettings for ManagedOnlineDeployment. :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings """ - super(OnlineDeploymentProperties, self).__init__(code_configuration=code_configuration, description=description, environment_id=environment_id, environment_variables=environment_variables, properties=properties, **kwargs) + super().__init__( + code_configuration=code_configuration, + description=description, + environment_id=environment_id, + environment_variables=environment_variables, + properties=properties, + **kwargs + ) self.app_insights_enabled = app_insights_enabled self.data_collector = data_collector self.egress_public_network_access = egress_public_network_access - self.endpoint_compute_type = 'OnlineDeploymentProperties' # type: str + self.endpoint_compute_type: Optional[str] = None self.instance_type = instance_type self.liveness_probe = liveness_probe self.model = model @@ -19215,7 +19075,7 @@ def __init__( self.scale_settings = scale_settings -class KubernetesOnlineDeployment(OnlineDeploymentProperties): +class KubernetesOnlineDeployment(OnlineDeploymentProperties): # pylint: disable=too-many-instance-attributes """Properties specific to a KubernetesOnlineDeployment. Variables are only populated by the server, and will be ignored when sending a request. @@ -19238,12 +19098,12 @@ class KubernetesOnlineDeployment(OnlineDeploymentProperties): :ivar data_collector: The mdc configuration, we disable mdc when it's null. :vartype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector :ivar egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Possible values include: "Enabled", + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and "Disabled". 
:vartype egress_public_network_access: str or ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :ivar endpoint_compute_type: Required. [Required] The compute type of the endpoint.Constant - filled by server. Possible values include: "Managed", "Kubernetes", "AzureMLCompute". + :ivar endpoint_compute_type: [Required] The compute type of the endpoint. Required. Known + values are: "Managed", "Kubernetes", and "AzureMLCompute". :vartype endpoint_compute_type: str or ~azure.mgmt.machinelearningservices.models.EndpointComputeType :ivar instance_type: Compute instance type. @@ -19254,8 +19114,8 @@ class KubernetesOnlineDeployment(OnlineDeploymentProperties): :vartype model: str :ivar model_mount_path: The path to mount the model in custom container. :vartype model_mount_path: str - :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values - include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled". + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. @@ -19275,52 +19135,55 @@ class KubernetesOnlineDeployment(OnlineDeploymentProperties): """ _validation = { - 'endpoint_compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, + "endpoint_compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, - 'data_collector': {'key': 'dataCollector', 'type': 'DataCollector'}, - 'egress_public_network_access': {'key': 'egressPublicNetworkAccess', 'type': 'str'}, - 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, - 'model': {'key': 'model', 'type': 'str'}, - 'model_mount_path': {'key': 'modelMountPath', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, - 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, - 'container_resource_requirements': {'key': 'containerResourceRequirements', 'type': 'ContainerResourceRequirements'}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, + "data_collector": {"key": "dataCollector", "type": "DataCollector"}, + "egress_public_network_access": {"key": "egressPublicNetworkAccess", "type": "str"}, + 
"endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, + "model": {"key": "model", "type": "str"}, + "model_mount_path": {"key": "modelMountPath", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, + "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, + "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, + "container_resource_requirements": { + "key": "containerResourceRequirements", + "type": "ContainerResourceRequirements", + }, } def __init__( self, *, - code_configuration: Optional["CodeConfiguration"] = None, + code_configuration: Optional["_models.CodeConfiguration"] = None, description: Optional[str] = None, environment_id: Optional[str] = None, environment_variables: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, - app_insights_enabled: Optional[bool] = False, - data_collector: Optional["DataCollector"] = None, - egress_public_network_access: Optional[Union[str, "EgressPublicNetworkAccessType"]] = None, + app_insights_enabled: bool = False, + data_collector: Optional["_models.DataCollector"] = None, + egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, instance_type: Optional[str] = None, - liveness_probe: Optional["ProbeSettings"] = None, + liveness_probe: Optional["_models.ProbeSettings"] = None, model: Optional[str] = None, model_mount_path: Optional[str] = None, - readiness_probe: Optional["ProbeSettings"] = None, - request_settings: Optional["OnlineRequestSettings"] = None, - scale_settings: Optional["OnlineScaleSettings"] = None, - container_resource_requirements: Optional["ContainerResourceRequirements"] = None, - **kwargs - ): + readiness_probe: Optional["_models.ProbeSettings"] = None, + request_settings: Optional["_models.OnlineRequestSettings"] = None, + scale_settings: Optional["_models.OnlineScaleSettings"] = None, + container_resource_requirements: Optional["_models.ContainerResourceRequirements"] = None, + **kwargs: Any + ) -> None: """ :keyword code_configuration: Code configuration for the endpoint deployment. :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration @@ -19338,7 +19201,7 @@ def __init__( :keyword data_collector: The mdc configuration, we disable mdc when it's null. :paramtype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector :keyword egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Possible values include: "Enabled", + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and "Disabled". 
:paramtype egress_public_network_access: str or ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType @@ -19365,12 +19228,29 @@ def __init__( :paramtype container_resource_requirements: ~azure.mgmt.machinelearningservices.models.ContainerResourceRequirements """ - super(KubernetesOnlineDeployment, self).__init__(code_configuration=code_configuration, description=description, environment_id=environment_id, environment_variables=environment_variables, properties=properties, app_insights_enabled=app_insights_enabled, data_collector=data_collector, egress_public_network_access=egress_public_network_access, instance_type=instance_type, liveness_probe=liveness_probe, model=model, model_mount_path=model_mount_path, readiness_probe=readiness_probe, request_settings=request_settings, scale_settings=scale_settings, **kwargs) - self.endpoint_compute_type = 'Kubernetes' # type: str + super().__init__( + code_configuration=code_configuration, + description=description, + environment_id=environment_id, + environment_variables=environment_variables, + properties=properties, + app_insights_enabled=app_insights_enabled, + data_collector=data_collector, + egress_public_network_access=egress_public_network_access, + instance_type=instance_type, + liveness_probe=liveness_probe, + model=model, + model_mount_path=model_mount_path, + readiness_probe=readiness_probe, + request_settings=request_settings, + scale_settings=scale_settings, + **kwargs + ) + self.endpoint_compute_type: str = "Kubernetes" self.container_resource_requirements = container_resource_requirements -class KubernetesProperties(msrest.serialization.Model): +class KubernetesProperties(_serialization.Model): """Kubernetes properties. :ivar relay_connection_string: Relay connection string. 
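For the online-deployment hierarchy above, the Kubernetes-flavored subclass now sets endpoint_compute_type in its constructor and app_insights_enabled defaults to a plain bool. A minimal sketch with an assumed import path and placeholder values only:

from azure.ai.ml._restclient.v2023_08_01_preview import models as _models  # assumed import path

deployment = _models.KubernetesOnlineDeployment(
    description="blue deployment",            # placeholder
    model="<model-uri>",                      # placeholder; the docstring describes this as the URI path to the model
    instance_type="<instance-type-name>",     # placeholder Kubernetes instance type name
    app_insights_enabled=False,               # plain bool default after regeneration
    egress_public_network_access="Enabled",   # "Enabled" or "Disabled"
)
assert deployment.endpoint_compute_type == "Kubernetes"  # discriminator set by the generated constructor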
@@ -19393,14 +19273,14 @@ class KubernetesProperties(msrest.serialization.Model): """ _attribute_map = { - 'relay_connection_string': {'key': 'relayConnectionString', 'type': 'str'}, - 'service_bus_connection_string': {'key': 'serviceBusConnectionString', 'type': 'str'}, - 'extension_principal_id': {'key': 'extensionPrincipalId', 'type': 'str'}, - 'extension_instance_release_train': {'key': 'extensionInstanceReleaseTrain', 'type': 'str'}, - 'vc_name': {'key': 'vcName', 'type': 'str'}, - 'namespace': {'key': 'namespace', 'type': 'str'}, - 'default_instance_type': {'key': 'defaultInstanceType', 'type': 'str'}, - 'instance_types': {'key': 'instanceTypes', 'type': '{InstanceTypeSchema}'}, + "relay_connection_string": {"key": "relayConnectionString", "type": "str"}, + "service_bus_connection_string": {"key": "serviceBusConnectionString", "type": "str"}, + "extension_principal_id": {"key": "extensionPrincipalId", "type": "str"}, + "extension_instance_release_train": {"key": "extensionInstanceReleaseTrain", "type": "str"}, + "vc_name": {"key": "vcName", "type": "str"}, + "namespace": {"key": "namespace", "type": "str"}, + "default_instance_type": {"key": "defaultInstanceType", "type": "str"}, + "instance_types": {"key": "instanceTypes", "type": "{InstanceTypeSchema}"}, } def __init__( @@ -19411,11 +19291,11 @@ def __init__( extension_principal_id: Optional[str] = None, extension_instance_release_train: Optional[str] = None, vc_name: Optional[str] = None, - namespace: Optional[str] = "default", + namespace: str = "default", default_instance_type: Optional[str] = None, - instance_types: Optional[Dict[str, "InstanceTypeSchema"]] = None, - **kwargs - ): + instance_types: Optional[Dict[str, "_models.InstanceTypeSchema"]] = None, + **kwargs: Any + ) -> None: """ :keyword relay_connection_string: Relay connection string. :paramtype relay_connection_string: str @@ -19435,7 +19315,7 @@ def __init__( :paramtype instance_types: dict[str, ~azure.mgmt.machinelearningservices.models.InstanceTypeSchema] """ - super(KubernetesProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.relay_connection_string = relay_connection_string self.service_bus_connection_string = service_bus_connection_string self.extension_principal_id = extension_principal_id @@ -19446,7 +19326,7 @@ def __init__( self.instance_types = instance_types -class LabelCategory(msrest.serialization.Model): +class LabelCategory(_serialization.Model): """Label category definition. :ivar classes: Dictionary of label classes in this category. @@ -19454,40 +19334,40 @@ class LabelCategory(msrest.serialization.Model): :ivar display_name: Display name of the label category. :vartype display_name: str :ivar multi_select: Indicates whether it is allowed to select multiple classes in this - category. Possible values include: "Enabled", "Disabled". + category. Known values are: "Enabled" and "Disabled". 
:vartype multi_select: str or ~azure.mgmt.machinelearningservices.models.MultiSelect """ _attribute_map = { - 'classes': {'key': 'classes', 'type': '{LabelClass}'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'multi_select': {'key': 'multiSelect', 'type': 'str'}, + "classes": {"key": "classes", "type": "{LabelClass}"}, + "display_name": {"key": "displayName", "type": "str"}, + "multi_select": {"key": "multiSelect", "type": "str"}, } def __init__( self, *, - classes: Optional[Dict[str, "LabelClass"]] = None, + classes: Optional[Dict[str, "_models.LabelClass"]] = None, display_name: Optional[str] = None, - multi_select: Optional[Union[str, "MultiSelect"]] = None, - **kwargs - ): + multi_select: Optional[Union[str, "_models.MultiSelect"]] = None, + **kwargs: Any + ) -> None: """ :keyword classes: Dictionary of label classes in this category. :paramtype classes: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] :keyword display_name: Display name of the label category. :paramtype display_name: str :keyword multi_select: Indicates whether it is allowed to select multiple classes in this - category. Possible values include: "Enabled", "Disabled". + category. Known values are: "Enabled" and "Disabled". :paramtype multi_select: str or ~azure.mgmt.machinelearningservices.models.MultiSelect """ - super(LabelCategory, self).__init__(**kwargs) + super().__init__(**kwargs) self.classes = classes self.display_name = display_name self.multi_select = multi_select -class LabelClass(msrest.serialization.Model): +class LabelClass(_serialization.Model): """Label class definition. :ivar display_name: Display name of the label class. @@ -19497,60 +19377,60 @@ class LabelClass(msrest.serialization.Model): """ _attribute_map = { - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'subclasses': {'key': 'subclasses', 'type': '{LabelClass}'}, + "display_name": {"key": "displayName", "type": "str"}, + "subclasses": {"key": "subclasses", "type": "{LabelClass}"}, } def __init__( self, *, display_name: Optional[str] = None, - subclasses: Optional[Dict[str, "LabelClass"]] = None, - **kwargs - ): + subclasses: Optional[Dict[str, "_models.LabelClass"]] = None, + **kwargs: Any + ) -> None: """ :keyword display_name: Display name of the label class. :paramtype display_name: str :keyword subclasses: Dictionary of subclasses of the label class. :paramtype subclasses: dict[str, ~azure.mgmt.machinelearningservices.models.LabelClass] """ - super(LabelClass, self).__init__(**kwargs) + super().__init__(**kwargs) self.display_name = display_name self.subclasses = subclasses -class LabelingDataConfiguration(msrest.serialization.Model): +class LabelingDataConfiguration(_serialization.Model): """Labeling data configuration definition. :ivar data_id: Resource Id of the data asset to perform labeling. :vartype data_id: str - :ivar incremental_data_refresh: Indicates whether to enable incremental data refresh. Possible - values include: "Enabled", "Disabled". + :ivar incremental_data_refresh: Indicates whether to enable incremental data refresh. Known + values are: "Enabled" and "Disabled". 
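The label taxonomy models above nest naturally; a small sketch, again with an assumed import path and made-up class names:

from azure.ai.ml._restclient.v2023_08_01_preview import models as _models  # assumed import path

animal = _models.LabelCategory(
    display_name="Animal",                      # placeholder category name
    multi_select="Disabled",                    # "Enabled" or "Disabled"
    classes={
        "cat": _models.LabelClass(display_name="Cat"),
        "dog": _models.LabelClass(
            display_name="Dog",
            subclasses={"puppy": _models.LabelClass(display_name="Puppy")},  # subclasses nest further LabelClass entries
        ),
    },
)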
:vartype incremental_data_refresh: str or ~azure.mgmt.machinelearningservices.models.IncrementalDataRefresh """ _attribute_map = { - 'data_id': {'key': 'dataId', 'type': 'str'}, - 'incremental_data_refresh': {'key': 'incrementalDataRefresh', 'type': 'str'}, + "data_id": {"key": "dataId", "type": "str"}, + "incremental_data_refresh": {"key": "incrementalDataRefresh", "type": "str"}, } def __init__( self, *, data_id: Optional[str] = None, - incremental_data_refresh: Optional[Union[str, "IncrementalDataRefresh"]] = None, - **kwargs - ): + incremental_data_refresh: Optional[Union[str, "_models.IncrementalDataRefresh"]] = None, + **kwargs: Any + ) -> None: """ :keyword data_id: Resource Id of the data asset to perform labeling. :paramtype data_id: str - :keyword incremental_data_refresh: Indicates whether to enable incremental data refresh. - Possible values include: "Enabled", "Disabled". + :keyword incremental_data_refresh: Indicates whether to enable incremental data refresh. Known + values are: "Enabled" and "Disabled". :paramtype incremental_data_refresh: str or ~azure.mgmt.machinelearningservices.models.IncrementalDataRefresh """ - super(LabelingDataConfiguration, self).__init__(**kwargs) + super().__init__(**kwargs) self.data_id = data_id self.incremental_data_refresh = incremental_data_refresh @@ -19573,73 +19453,62 @@ class LabelingJob(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'LabelingJobProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "LabelingJobProperties"}, } - def __init__( - self, - *, - properties: "LabelingJobProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.LabelingJobProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties """ - super(LabelingJob, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class LabelingJobMediaProperties(msrest.serialization.Model): +class LabelingJobMediaProperties(_serialization.Model): """Properties of a labeling job. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: LabelingJobImageProperties, LabelingJobTextProperties. 
+ You probably want to use the sub-classes and not this class directly. Known sub-classes are: + LabelingJobImageProperties, LabelingJobTextProperties All required parameters must be populated in order to send to Azure. - :ivar media_type: Required. [Required] Media type of the job.Constant filled by server. - Possible values include: "Image", "Text". + :ivar media_type: [Required] Media type of the job. Required. Known values are: "Image" and + "Text". :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType """ _validation = { - 'media_type': {'required': True}, + "media_type": {"required": True}, } _attribute_map = { - 'media_type': {'key': 'mediaType', 'type': 'str'}, + "media_type": {"key": "mediaType", "type": "str"}, } - _subtype_map = { - 'media_type': {'Image': 'LabelingJobImageProperties', 'Text': 'LabelingJobTextProperties'} - } + _subtype_map = {"media_type": {"Image": "LabelingJobImageProperties", "Text": "LabelingJobTextProperties"}} - def __init__( - self, - **kwargs - ): - """ - """ - super(LabelingJobMediaProperties, self).__init__(**kwargs) - self.media_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.media_type: Optional[str] = None class LabelingJobImageProperties(LabelingJobMediaProperties): @@ -19647,41 +19516,38 @@ class LabelingJobImageProperties(LabelingJobMediaProperties): All required parameters must be populated in order to send to Azure. - :ivar media_type: Required. [Required] Media type of the job.Constant filled by server. - Possible values include: "Image", "Text". + :ivar media_type: [Required] Media type of the job. Required. Known values are: "Image" and + "Text". :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType - :ivar annotation_type: Annotation type of image labeling job. Possible values include: - "Classification", "BoundingBox", "InstanceSegmentation". + :ivar annotation_type: Annotation type of image labeling job. Known values are: + "Classification", "BoundingBox", and "InstanceSegmentation". :vartype annotation_type: str or ~azure.mgmt.machinelearningservices.models.ImageAnnotationType """ _validation = { - 'media_type': {'required': True}, + "media_type": {"required": True}, } _attribute_map = { - 'media_type': {'key': 'mediaType', 'type': 'str'}, - 'annotation_type': {'key': 'annotationType', 'type': 'str'}, + "media_type": {"key": "mediaType", "type": "str"}, + "annotation_type": {"key": "annotationType", "type": "str"}, } def __init__( - self, - *, - annotation_type: Optional[Union[str, "ImageAnnotationType"]] = None, - **kwargs - ): + self, *, annotation_type: Optional[Union[str, "_models.ImageAnnotationType"]] = None, **kwargs: Any + ) -> None: """ - :keyword annotation_type: Annotation type of image labeling job. Possible values include: - "Classification", "BoundingBox", "InstanceSegmentation". + :keyword annotation_type: Annotation type of image labeling job. Known values are: + "Classification", "BoundingBox", and "InstanceSegmentation". :paramtype annotation_type: str or ~azure.mgmt.machinelearningservices.models.ImageAnnotationType """ - super(LabelingJobImageProperties, self).__init__(**kwargs) - self.media_type = 'Image' # type: str + super().__init__(**kwargs) + self.media_type: str = "Image" self.annotation_type = annotation_type -class LabelingJobInstructions(msrest.serialization.Model): +class LabelingJobInstructions(_serialization.Model): """Instructions for labeling job. 
:ivar uri: The link to a page with detailed labeling instructions for labelers. @@ -19689,24 +19555,19 @@ class LabelingJobInstructions(msrest.serialization.Model): """ _attribute_map = { - 'uri': {'key': 'uri', 'type': 'str'}, + "uri": {"key": "uri", "type": "str"}, } - def __init__( - self, - *, - uri: Optional[str] = None, - **kwargs - ): + def __init__(self, *, uri: Optional[str] = None, **kwargs: Any) -> None: """ :keyword uri: The link to a page with detailed labeling instructions for labelers. :paramtype uri: str """ - super(LabelingJobInstructions, self).__init__(**kwargs) + super().__init__(**kwargs) self.uri = uri -class LabelingJobProperties(JobBaseProperties): +class LabelingJobProperties(JobBaseProperties): # pylint: disable=too-many-instance-attributes """Labeling job definition. Variables are only populated by the server, and will be ignored when sending a request. @@ -19717,7 +19578,7 @@ class LabelingJobProperties(JobBaseProperties): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar component_id: ARM resource ID of the component resource. :vartype component_id: str @@ -19734,8 +19595,8 @@ class LabelingJobProperties(JobBaseProperties): :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration :ivar is_archived: Is the asset archived?. :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". + :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType :ivar notification_setting: Notification setting for the job. :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting @@ -19745,9 +19606,9 @@ class LabelingJobProperties(JobBaseProperties): :ivar services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus :ivar created_date_time: Created time of the job in UTC timezone. :vartype created_date_time: ~datetime.datetime @@ -19768,8 +19629,8 @@ class LabelingJobProperties(JobBaseProperties): :vartype progress_metrics: ~azure.mgmt.machinelearningservices.models.ProgressMetrics :ivar project_id: Internal id of the job(Previously called project). :vartype project_id: str - :ivar provisioning_state: Specifies the labeling job provisioning state. Possible values - include: "Succeeded", "Failed", "Canceled", "InProgress". 
+ :ivar provisioning_state: Specifies the labeling job provisioning state. Known values are: + "Succeeded", "Failed", "Canceled", and "InProgress". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.JobProvisioningState :ivar status_messages: Status messages of the job. @@ -19777,43 +19638,43 @@ class LabelingJobProperties(JobBaseProperties): """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'created_date_time': {'readonly': True}, - 'progress_metrics': {'readonly': True}, - 'project_id': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'status_messages': {'readonly': True}, + "job_type": {"required": True}, + "status": {"readonly": True}, + "created_date_time": {"readonly": True}, + "progress_metrics": {"readonly": True}, + "project_id": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "status_messages": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'}, - 'data_configuration': {'key': 'dataConfiguration', 'type': 'LabelingDataConfiguration'}, - 'job_instructions': {'key': 'jobInstructions', 'type': 'LabelingJobInstructions'}, - 'label_categories': {'key': 'labelCategories', 'type': '{LabelCategory}'}, - 'labeling_job_media_properties': {'key': 'labelingJobMediaProperties', 'type': 'LabelingJobMediaProperties'}, - 'ml_assist_configuration': {'key': 'mlAssistConfiguration', 'type': 'MLAssistConfiguration'}, - 'progress_metrics': {'key': 'progressMetrics', 'type': 'ProgressMetrics'}, - 'project_id': {'key': 'projectId', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'status_messages': {'key': 'statusMessages', 'type': '[StatusMessage]'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "created_date_time": {"key": "createdDateTime", "type": "iso-8601"}, + "data_configuration": 
{"key": "dataConfiguration", "type": "LabelingDataConfiguration"}, + "job_instructions": {"key": "jobInstructions", "type": "LabelingJobInstructions"}, + "label_categories": {"key": "labelCategories", "type": "{LabelCategory}"}, + "labeling_job_media_properties": {"key": "labelingJobMediaProperties", "type": "LabelingJobMediaProperties"}, + "ml_assist_configuration": {"key": "mlAssistConfiguration", "type": "MLAssistConfiguration"}, + "progress_metrics": {"key": "progressMetrics", "type": "ProgressMetrics"}, + "project_id": {"key": "projectId", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "status_messages": {"key": "statusMessages", "type": "[StatusMessage]"}, } - def __init__( + def __init__( # pylint: disable=too-many-locals self, *, description: Optional[str] = None, @@ -19822,25 +19683,25 @@ def __init__( component_id: Optional[str] = None, compute_id: Optional[str] = None, display_name: Optional[str] = None, - experiment_name: Optional[str] = "Default", - identity: Optional["IdentityConfiguration"] = None, - is_archived: Optional[bool] = False, - notification_setting: Optional["NotificationSetting"] = None, - secrets_configuration: Optional[Dict[str, "SecretConfiguration"]] = None, - services: Optional[Dict[str, "JobService"]] = None, - data_configuration: Optional["LabelingDataConfiguration"] = None, - job_instructions: Optional["LabelingJobInstructions"] = None, - label_categories: Optional[Dict[str, "LabelCategory"]] = None, - labeling_job_media_properties: Optional["LabelingJobMediaProperties"] = None, - ml_assist_configuration: Optional["MLAssistConfiguration"] = None, - **kwargs - ): + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + notification_setting: Optional["_models.NotificationSetting"] = None, + secrets_configuration: Optional[Dict[str, "_models.SecretConfiguration"]] = None, + services: Optional[Dict[str, "_models.JobService"]] = None, + data_configuration: Optional["_models.LabelingDataConfiguration"] = None, + job_instructions: Optional["_models.LabelingJobInstructions"] = None, + label_categories: Optional[Dict[str, "_models.LabelCategory"]] = None, + labeling_job_media_properties: Optional["_models.LabelingJobMediaProperties"] = None, + ml_assist_configuration: Optional["_models.MLAssistConfiguration"] = None, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword component_id: ARM resource ID of the component resource. 
:paramtype component_id: str @@ -19880,8 +19741,22 @@ def __init__( :paramtype ml_assist_configuration: ~azure.mgmt.machinelearningservices.models.MLAssistConfiguration """ - super(LabelingJobProperties, self).__init__(description=description, properties=properties, tags=tags, component_id=component_id, compute_id=compute_id, display_name=display_name, experiment_name=experiment_name, identity=identity, is_archived=is_archived, notification_setting=notification_setting, secrets_configuration=secrets_configuration, services=services, **kwargs) - self.job_type = 'Labeling' # type: str + super().__init__( + description=description, + properties=properties, + tags=tags, + component_id=component_id, + compute_id=compute_id, + display_name=display_name, + experiment_name=experiment_name, + identity=identity, + is_archived=is_archived, + notification_setting=notification_setting, + secrets_configuration=secrets_configuration, + services=services, + **kwargs + ) + self.job_type: str = "Labeling" self.created_date_time = None self.data_configuration = data_configuration self.job_instructions = job_instructions @@ -19894,7 +19769,7 @@ def __init__( self.status_messages = None -class LabelingJobResourceArmPaginatedResult(msrest.serialization.Model): +class LabelingJobResourceArmPaginatedResult(_serialization.Model): """A paginated list of LabelingJob entities. :ivar next_link: The link to the next page of LabelingJob objects. If null, there are no @@ -19905,17 +19780,13 @@ class LabelingJobResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[LabelingJob]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[LabelingJob]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["LabelingJob"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.LabelingJob"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of LabelingJob objects. If null, there are no additional pages. @@ -19923,7 +19794,7 @@ def __init__( :keyword value: An array of objects of type LabelingJob. :paramtype value: list[~azure.mgmt.machinelearningservices.models.LabelingJob] """ - super(LabelingJobResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value @@ -19933,82 +19804,71 @@ class LabelingJobTextProperties(LabelingJobMediaProperties): All required parameters must be populated in order to send to Azure. - :ivar media_type: Required. [Required] Media type of the job.Constant filled by server. - Possible values include: "Image", "Text". + :ivar media_type: [Required] Media type of the job. Required. Known values are: "Image" and + "Text". :vartype media_type: str or ~azure.mgmt.machinelearningservices.models.MediaType - :ivar annotation_type: Annotation type of text labeling job. Possible values include: - "Classification", "NamedEntityRecognition". + :ivar annotation_type: Annotation type of text labeling job. Known values are: "Classification" + and "NamedEntityRecognition". 
:vartype annotation_type: str or ~azure.mgmt.machinelearningservices.models.TextAnnotationType """ _validation = { - 'media_type': {'required': True}, + "media_type": {"required": True}, } _attribute_map = { - 'media_type': {'key': 'mediaType', 'type': 'str'}, - 'annotation_type': {'key': 'annotationType', 'type': 'str'}, + "media_type": {"key": "mediaType", "type": "str"}, + "annotation_type": {"key": "annotationType", "type": "str"}, } def __init__( - self, - *, - annotation_type: Optional[Union[str, "TextAnnotationType"]] = None, - **kwargs - ): + self, *, annotation_type: Optional[Union[str, "_models.TextAnnotationType"]] = None, **kwargs: Any + ) -> None: """ - :keyword annotation_type: Annotation type of text labeling job. Possible values include: - "Classification", "NamedEntityRecognition". + :keyword annotation_type: Annotation type of text labeling job. Known values are: + "Classification" and "NamedEntityRecognition". :paramtype annotation_type: str or ~azure.mgmt.machinelearningservices.models.TextAnnotationType """ - super(LabelingJobTextProperties, self).__init__(**kwargs) - self.media_type = 'Text' # type: str + super().__init__(**kwargs) + self.media_type: str = "Text" self.annotation_type = annotation_type -class OneLakeArtifact(msrest.serialization.Model): +class OneLakeArtifact(_serialization.Model): """OneLake artifact (data source) configuration. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: LakeHouseArtifact. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + LakeHouseArtifact All required parameters must be populated in order to send to Azure. - :ivar artifact_name: Required. [Required] OneLake artifact name. + :ivar artifact_name: [Required] OneLake artifact name. Required. :vartype artifact_name: str - :ivar artifact_type: Required. [Required] OneLake artifact type.Constant filled by server. - Possible values include: "LakeHouse". + :ivar artifact_type: [Required] OneLake artifact type. Required. "LakeHouse" :vartype artifact_type: str or ~azure.mgmt.machinelearningservices.models.OneLakeArtifactType """ _validation = { - 'artifact_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'artifact_type': {'required': True}, + "artifact_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "artifact_type": {"required": True}, } _attribute_map = { - 'artifact_name': {'key': 'artifactName', 'type': 'str'}, - 'artifact_type': {'key': 'artifactType', 'type': 'str'}, + "artifact_name": {"key": "artifactName", "type": "str"}, + "artifact_type": {"key": "artifactType", "type": "str"}, } - _subtype_map = { - 'artifact_type': {'LakeHouse': 'LakeHouseArtifact'} - } + _subtype_map = {"artifact_type": {"LakeHouse": "LakeHouseArtifact"}} - def __init__( - self, - *, - artifact_name: str, - **kwargs - ): + def __init__(self, *, artifact_name: str, **kwargs: Any) -> None: """ - :keyword artifact_name: Required. [Required] OneLake artifact name. + :keyword artifact_name: [Required] OneLake artifact name. Required. :paramtype artifact_name: str """ - super(OneLakeArtifact, self).__init__(**kwargs) + super().__init__(**kwargs) self.artifact_name = artifact_name - self.artifact_type = None # type: Optional[str] + self.artifact_type: Optional[str] = None class LakeHouseArtifact(OneLakeArtifact): @@ -20016,38 +19876,32 @@ class LakeHouseArtifact(OneLakeArtifact): All required parameters must be populated in order to send to Azure. 
- :ivar artifact_name: Required. [Required] OneLake artifact name. + :ivar artifact_name: [Required] OneLake artifact name. Required. :vartype artifact_name: str - :ivar artifact_type: Required. [Required] OneLake artifact type.Constant filled by server. - Possible values include: "LakeHouse". + :ivar artifact_type: [Required] OneLake artifact type. Required. "LakeHouse" :vartype artifact_type: str or ~azure.mgmt.machinelearningservices.models.OneLakeArtifactType """ _validation = { - 'artifact_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'artifact_type': {'required': True}, + "artifact_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "artifact_type": {"required": True}, } _attribute_map = { - 'artifact_name': {'key': 'artifactName', 'type': 'str'}, - 'artifact_type': {'key': 'artifactType', 'type': 'str'}, + "artifact_name": {"key": "artifactName", "type": "str"}, + "artifact_type": {"key": "artifactType", "type": "str"}, } - def __init__( - self, - *, - artifact_name: str, - **kwargs - ): + def __init__(self, *, artifact_name: str, **kwargs: Any) -> None: """ - :keyword artifact_name: Required. [Required] OneLake artifact name. + :keyword artifact_name: [Required] OneLake artifact name. Required. :paramtype artifact_name: str """ - super(LakeHouseArtifact, self).__init__(artifact_name=artifact_name, **kwargs) - self.artifact_type = 'LakeHouse' # type: str + super().__init__(artifact_name=artifact_name, **kwargs) + self.artifact_type: str = "LakeHouse" -class ListAmlUserFeatureResult(msrest.serialization.Model): +class ListAmlUserFeatureResult(_serialization.Model): """The List Aml user feature operation response. Variables are only populated by the server, and will be ignored when sending a request. @@ -20060,27 +19914,23 @@ class ListAmlUserFeatureResult(msrest.serialization.Model): """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + "value": {"readonly": True}, + "next_link": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[AmlUserFeature]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[AmlUserFeature]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ListAmlUserFeatureResult, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.value = None self.next_link = None -class ListNotebookKeysResult(msrest.serialization.Model): +class ListNotebookKeysResult(_serialization.Model): """ListNotebookKeysResult. Variables are only populated by the server, and will be ignored when sending a request. 
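Putting the labeling-job pieces above together, a hedged sketch of assembling a LabelingJob payload; the import path, URL, resource ID, and names are placeholders only.

from azure.ai.ml._restclient.v2023_08_01_preview import models as _models  # assumed import path

media = _models.LabelingJobImageProperties(annotation_type="BoundingBox")  # "Classification", "BoundingBox", or "InstanceSegmentation"
job_properties = _models.LabelingJobProperties(
    display_name="image-labeling",                                        # placeholder
    data_configuration=_models.LabelingDataConfiguration(
        data_id="<data-asset-arm-id>",                                    # placeholder
        incremental_data_refresh="Enabled",                               # "Enabled" or "Disabled"
    ),
    job_instructions=_models.LabelingJobInstructions(uri="https://example.invalid/labeling-guide"),  # placeholder URL
    label_categories={"animal": _models.LabelCategory(display_name="Animal")},  # placeholder category
    labeling_job_media_properties=media,
)
labeling_job = _models.LabelingJob(properties=job_properties)             # properties is the only required argument
assert job_properties.job_type == "Labeling"                              # discriminator set by the generated constructor
assert media.media_type == "Image"                                        # likewise set on the media properties subclass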
@@ -20092,27 +19942,23 @@ class ListNotebookKeysResult(msrest.serialization.Model): """ _validation = { - 'primary_access_key': {'readonly': True}, - 'secondary_access_key': {'readonly': True}, + "primary_access_key": {"readonly": True}, + "secondary_access_key": {"readonly": True}, } _attribute_map = { - 'primary_access_key': {'key': 'primaryAccessKey', 'type': 'str'}, - 'secondary_access_key': {'key': 'secondaryAccessKey', 'type': 'str'}, + "primary_access_key": {"key": "primaryAccessKey", "type": "str"}, + "secondary_access_key": {"key": "secondaryAccessKey", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ListNotebookKeysResult, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.primary_access_key = None self.secondary_access_key = None -class ListStorageAccountKeysResult(msrest.serialization.Model): +class ListStorageAccountKeysResult(_serialization.Model): """ListStorageAccountKeysResult. Variables are only populated by the server, and will be ignored when sending a request. @@ -20122,24 +19968,20 @@ class ListStorageAccountKeysResult(msrest.serialization.Model): """ _validation = { - 'user_storage_key': {'readonly': True}, + "user_storage_key": {"readonly": True}, } _attribute_map = { - 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, + "user_storage_key": {"key": "userStorageKey", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ListStorageAccountKeysResult, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.user_storage_key = None -class ListUsagesResult(msrest.serialization.Model): +class ListUsagesResult(_serialization.Model): """The List Usages operation response. Variables are only populated by the server, and will be ignored when sending a request. @@ -20152,27 +19994,23 @@ class ListUsagesResult(msrest.serialization.Model): """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + "value": {"readonly": True}, + "next_link": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[Usage]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[Usage]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ListUsagesResult, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.value = None self.next_link = None -class ListWorkspaceKeysResult(msrest.serialization.Model): +class ListWorkspaceKeysResult(_serialization.Model): """ListWorkspaceKeysResult. Variables are only populated by the server, and will be ignored when sending a request. 
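# --- Editorial illustration, not part of the generated patch -----------------
# ListNotebookKeysResult, ListStorageAccountKeysResult and ListUsagesResult
# above are entirely server-populated: every field is readonly, the constructors
# take no keywords, and attributes start as None. Values normally arrive by
# deserializing a service response (msrest-style deserialize is assumed to be
# available on _serialization.Model).
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

keys = _models.ListNotebookKeysResult()
assert keys.primary_access_key is None and keys.secondary_access_key is None

populated = _models.ListNotebookKeysResult.deserialize(
    {"primaryAccessKey": "<redacted>", "secondaryAccessKey": "<redacted>"}
)
# -----------------------------------------------------------------------------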
@@ -20192,26 +20030,29 @@ class ListWorkspaceKeysResult(msrest.serialization.Model): """ _validation = { - 'app_insights_instrumentation_key': {'readonly': True}, - 'user_storage_arm_id': {'readonly': True}, - 'user_storage_key': {'readonly': True}, + "app_insights_instrumentation_key": {"readonly": True}, + "user_storage_arm_id": {"readonly": True}, + "user_storage_key": {"readonly": True}, } _attribute_map = { - 'app_insights_instrumentation_key': {'key': 'appInsightsInstrumentationKey', 'type': 'str'}, - 'container_registry_credentials': {'key': 'containerRegistryCredentials', 'type': 'RegistryListCredentialsResult'}, - 'notebook_access_keys': {'key': 'notebookAccessKeys', 'type': 'ListNotebookKeysResult'}, - 'user_storage_arm_id': {'key': 'userStorageArmId', 'type': 'str'}, - 'user_storage_key': {'key': 'userStorageKey', 'type': 'str'}, + "app_insights_instrumentation_key": {"key": "appInsightsInstrumentationKey", "type": "str"}, + "container_registry_credentials": { + "key": "containerRegistryCredentials", + "type": "RegistryListCredentialsResult", + }, + "notebook_access_keys": {"key": "notebookAccessKeys", "type": "ListNotebookKeysResult"}, + "user_storage_arm_id": {"key": "userStorageArmId", "type": "str"}, + "user_storage_key": {"key": "userStorageKey", "type": "str"}, } def __init__( self, *, - container_registry_credentials: Optional["RegistryListCredentialsResult"] = None, - notebook_access_keys: Optional["ListNotebookKeysResult"] = None, - **kwargs - ): + container_registry_credentials: Optional["_models.RegistryListCredentialsResult"] = None, + notebook_access_keys: Optional["_models.ListNotebookKeysResult"] = None, + **kwargs: Any + ) -> None: """ :keyword container_registry_credentials: :paramtype container_registry_credentials: @@ -20220,7 +20061,7 @@ def __init__( :paramtype notebook_access_keys: ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult """ - super(ListWorkspaceKeysResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.app_insights_instrumentation_key = None self.container_registry_credentials = container_registry_credentials self.notebook_access_keys = notebook_access_keys @@ -20228,7 +20069,7 @@ def __init__( self.user_storage_key = None -class ListWorkspaceQuotas(msrest.serialization.Model): +class ListWorkspaceQuotas(_serialization.Model): """The List WorkspaceQuotasByVMFamily operation response. Variables are only populated by the server, and will be ignored when sending a request. @@ -20241,22 +20082,18 @@ class ListWorkspaceQuotas(msrest.serialization.Model): """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + "value": {"readonly": True}, + "next_link": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[ResourceQuota]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[ResourceQuota]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ListWorkspaceQuotas, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.value = None self.next_link = None @@ -20268,40 +20105,34 @@ class LiteralJobInput(JobInput): :ivar description: Description for the input. :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". 
+ :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :ivar value: Required. [Required] Literal value for the input. + :ivar value: [Required] Literal value for the input. Required. :vartype value: str """ _validation = { - 'job_input_type': {'required': True}, - 'value': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "job_input_type": {"required": True}, + "value": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "value": {"key": "value", "type": "str"}, } - def __init__( - self, - *, - value: str, - description: Optional[str] = None, - **kwargs - ): + def __init__(self, *, value: str, description: Optional[str] = None, **kwargs: Any) -> None: """ :keyword description: Description for the input. :paramtype description: str - :keyword value: Required. [Required] Literal value for the input. + :keyword value: [Required] Literal value for the input. Required. :paramtype value: str """ - super(LiteralJobInput, self).__init__(description=description, **kwargs) - self.job_input_type = 'literal' # type: str + super().__init__(description=description, **kwargs) + self.job_input_type: str = "literal" self.value = value @@ -20310,8 +20141,8 @@ class ManagedComputeIdentity(MonitorComputeIdentityBase): All required parameters must be populated in order to send to Azure. - :ivar compute_identity_type: Required. [Required] Monitor compute identity type enum.Constant - filled by server. Possible values include: "AmlToken", "ManagedIdentity". + :ivar compute_identity_type: [Required] Monitor compute identity type enum. Required. Known + values are: "AmlToken" and "ManagedIdentity". :vartype compute_identity_type: str or ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityType :ivar identity: Managed service identity (system assigned and/or user assigned identities). @@ -20319,26 +20150,21 @@ class ManagedComputeIdentity(MonitorComputeIdentityBase): """ _validation = { - 'compute_identity_type': {'required': True}, + "compute_identity_type": {"required": True}, } _attribute_map = { - 'compute_identity_type': {'key': 'computeIdentityType', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, + "compute_identity_type": {"key": "computeIdentityType", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, } - def __init__( - self, - *, - identity: Optional["ManagedServiceIdentity"] = None, - **kwargs - ): + def __init__(self, *, identity: Optional["_models.ManagedServiceIdentity"] = None, **kwargs: Any) -> None: """ :keyword identity: Managed service identity (system assigned and/or user assigned identities). 
:paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity """ - super(ManagedComputeIdentity, self).__init__(**kwargs) - self.compute_identity_type = 'ManagedIdentity' # type: str + super().__init__(**kwargs) + self.compute_identity_type: str = "ManagedIdentity" self.identity = identity @@ -20347,8 +20173,8 @@ class ManagedIdentity(IdentityConfiguration): All required parameters must be populated in order to send to Azure. - :ivar identity_type: Required. [Required] Specifies the type of identity framework.Constant - filled by server. Possible values include: "Managed", "AMLToken", "UserIdentity". + :ivar identity_type: [Required] Specifies the type of identity framework. Required. Known + values are: "Managed", "AMLToken", and "UserIdentity". :vartype identity_type: str or ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType :ivar client_id: Specifies a user-assigned identity by client ID. For system-assigned, do not @@ -20363,14 +20189,14 @@ class ManagedIdentity(IdentityConfiguration): """ _validation = { - 'identity_type': {'required': True}, + "identity_type": {"required": True}, } _attribute_map = { - 'identity_type': {'key': 'identityType', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'object_id': {'key': 'objectId', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, + "identity_type": {"key": "identityType", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + "object_id": {"key": "objectId", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, } def __init__( @@ -20379,8 +20205,8 @@ def __init__( client_id: Optional[str] = None, object_id: Optional[str] = None, resource_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_id: Specifies a user-assigned identity by client ID. For system-assigned, do not set this field. @@ -20392,8 +20218,8 @@ def __init__( system-assigned, do not set this field. :paramtype resource_id: str """ - super(ManagedIdentity, self).__init__(**kwargs) - self.identity_type = 'Managed' # type: str + super().__init__(**kwargs) + self.identity_type: str = "Managed" self.client_id = client_id self.object_id = object_id self.resource_id = resource_id @@ -20402,21 +20228,28 @@ def __init__( class ManagedIdentityAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """ManagedIdentityAuthTypeWorkspaceConnectionProperties. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", + :ivar category: Category of the connection. 
Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar created_by_workspace_arm_id: The arm id of the workspace which created this connection. + :vartype created_by_workspace_arm_id: str :ivar expiry_time: :vartype expiry_time: ~datetime.datetime + :ivar is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :vartype is_shared_to_all: bool :ivar metadata: Any object. - :vartype metadata: any + :vartype metadata: JSON :ivar target: :vartype target: str :ivar credentials: @@ -20425,50 +20258,64 @@ class ManagedIdentityAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPr """ _validation = { - 'auth_type': {'required': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionManagedIdentity'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionManagedIdentity"}, } def __init__( self, *, - category: Optional[Union[str, "ConnectionCategory"]] = None, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, expiry_time: Optional[datetime.datetime] = None, - metadata: Optional[Any] = None, + is_shared_to_all: Optional[bool] = None, + metadata: Optional[JSON] = None, target: Optional[str] = None, - credentials: Optional["WorkspaceConnectionManagedIdentity"] = None, - **kwargs - ): + credentials: Optional["_models.WorkspaceConnectionManagedIdentity"] = None, + **kwargs: Any + ) -> None: """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", + :keyword category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory :keyword expiry_time: :paramtype expiry_time: ~datetime.datetime + :keyword is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :paramtype is_shared_to_all: bool :keyword metadata: Any object. 
- :paramtype metadata: any + :paramtype metadata: JSON :keyword target: :paramtype target: str :keyword credentials: :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionManagedIdentity """ - super(ManagedIdentityAuthTypeWorkspaceConnectionProperties, self).__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) - self.auth_type = 'ManagedIdentity' # type: str + super().__init__( + category=category, + expiry_time=expiry_time, + is_shared_to_all=is_shared_to_all, + metadata=metadata, + target=target, + **kwargs + ) + self.auth_type: str = "ManagedIdentity" self.credentials = credentials -class ManagedNetworkProvisionOptions(msrest.serialization.Model): +class ManagedNetworkProvisionOptions(_serialization.Model): """Managed Network Provisioning options for managed network of a machine learning workspace. :ivar include_spark: @@ -20476,64 +20323,59 @@ class ManagedNetworkProvisionOptions(msrest.serialization.Model): """ _attribute_map = { - 'include_spark': {'key': 'includeSpark', 'type': 'bool'}, + "include_spark": {"key": "includeSpark", "type": "bool"}, } - def __init__( - self, - *, - include_spark: Optional[bool] = None, - **kwargs - ): + def __init__(self, *, include_spark: Optional[bool] = None, **kwargs: Any) -> None: """ :keyword include_spark: :paramtype include_spark: bool """ - super(ManagedNetworkProvisionOptions, self).__init__(**kwargs) + super().__init__(**kwargs) self.include_spark = include_spark -class ManagedNetworkProvisionStatus(msrest.serialization.Model): +class ManagedNetworkProvisionStatus(_serialization.Model): """Status of the Provisioning for the managed network of a machine learning workspace. :ivar spark_ready: :vartype spark_ready: bool - :ivar status: Status for the managed network of a machine learning workspace. Possible values - include: "Inactive", "Active". + :ivar status: Status for the managed network of a machine learning workspace. Known values are: + "Inactive" and "Active". :vartype status: str or ~azure.mgmt.machinelearningservices.models.ManagedNetworkStatus """ _attribute_map = { - 'spark_ready': {'key': 'sparkReady', 'type': 'bool'}, - 'status': {'key': 'status', 'type': 'str'}, + "spark_ready": {"key": "sparkReady", "type": "bool"}, + "status": {"key": "status", "type": "str"}, } def __init__( self, *, spark_ready: Optional[bool] = None, - status: Optional[Union[str, "ManagedNetworkStatus"]] = None, - **kwargs - ): + status: Optional[Union[str, "_models.ManagedNetworkStatus"]] = None, + **kwargs: Any + ) -> None: """ :keyword spark_ready: :paramtype spark_ready: bool - :keyword status: Status for the managed network of a machine learning workspace. Possible - values include: "Inactive", "Active". + :keyword status: Status for the managed network of a machine learning workspace. Known values + are: "Inactive" and "Active". :paramtype status: str or ~azure.mgmt.machinelearningservices.models.ManagedNetworkStatus """ - super(ManagedNetworkProvisionStatus, self).__init__(**kwargs) + super().__init__(**kwargs) self.spark_ready = spark_ready self.status = status -class ManagedNetworkSettings(msrest.serialization.Model): +class ManagedNetworkSettings(_serialization.Model): """Managed Network settings for a machine learning workspace. Variables are only populated by the server, and will be ignored when sending a request. :ivar isolation_mode: Isolation mode for the managed network of a machine learning workspace. 
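# --- Editorial illustration, not part of the generated patch -----------------
# The regenerated connection-properties models add is_shared_to_all and a
# read-only created_by_workspace_arm_id. A hedged sketch of the ManagedIdentity
# auth flavour (WorkspaceConnectionManagedIdentity is assumed to accept only
# optional keywords, as elsewhere in this models module):
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

conn = _models.ManagedIdentityAuthTypeWorkspaceConnectionProperties(
    category="ContainerRegistry",            # one of the documented Known values
    target="https://myregistry.azurecr.io",  # hypothetical target
    is_shared_to_all=True,                   # new in 2023-08-01-preview
    credentials=_models.WorkspaceConnectionManagedIdentity(),
)
assert conn.auth_type == "ManagedIdentity"
assert conn.created_by_workspace_arm_id is None  # read-only, server-populated
# -----------------------------------------------------------------------------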
- Possible values include: "Disabled", "AllowInternetOutbound", "AllowOnlyApprovedOutbound". + Known values are: "Disabled", "AllowInternetOutbound", and "AllowOnlyApprovedOutbound". :vartype isolation_mode: str or ~azure.mgmt.machinelearningservices.models.IsolationMode :ivar network_id: :vartype network_id: str @@ -20542,30 +20384,36 @@ class ManagedNetworkSettings(msrest.serialization.Model): :ivar status: Status of the Provisioning for the managed network of a machine learning workspace. :vartype status: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus + :ivar changeable_isolation_modes: Detail isolation modes for the managed network of a machine + learning workspace. + :vartype changeable_isolation_modes: list[str or + ~azure.mgmt.machinelearningservices.models.IsolationMode] """ _validation = { - 'network_id': {'readonly': True}, + "network_id": {"readonly": True}, + "changeable_isolation_modes": {"readonly": True}, } _attribute_map = { - 'isolation_mode': {'key': 'isolationMode', 'type': 'str'}, - 'network_id': {'key': 'networkId', 'type': 'str'}, - 'outbound_rules': {'key': 'outboundRules', 'type': '{OutboundRule}'}, - 'status': {'key': 'status', 'type': 'ManagedNetworkProvisionStatus'}, + "isolation_mode": {"key": "isolationMode", "type": "str"}, + "network_id": {"key": "networkId", "type": "str"}, + "outbound_rules": {"key": "outboundRules", "type": "{OutboundRule}"}, + "status": {"key": "status", "type": "ManagedNetworkProvisionStatus"}, + "changeable_isolation_modes": {"key": "changeableIsolationModes", "type": "[str]"}, } def __init__( self, *, - isolation_mode: Optional[Union[str, "IsolationMode"]] = None, - outbound_rules: Optional[Dict[str, "OutboundRule"]] = None, - status: Optional["ManagedNetworkProvisionStatus"] = None, - **kwargs - ): + isolation_mode: Optional[Union[str, "_models.IsolationMode"]] = None, + outbound_rules: Optional[Dict[str, "_models.OutboundRule"]] = None, + status: Optional["_models.ManagedNetworkProvisionStatus"] = None, + **kwargs: Any + ) -> None: """ :keyword isolation_mode: Isolation mode for the managed network of a machine learning - workspace. Possible values include: "Disabled", "AllowInternetOutbound", + workspace. Known values are: "Disabled", "AllowInternetOutbound", and "AllowOnlyApprovedOutbound". :paramtype isolation_mode: str or ~azure.mgmt.machinelearningservices.models.IsolationMode :keyword outbound_rules: Dictionary of :code:``. @@ -20574,14 +20422,15 @@ def __init__( workspace. :paramtype status: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus """ - super(ManagedNetworkSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.isolation_mode = isolation_mode self.network_id = None self.outbound_rules = outbound_rules self.status = status + self.changeable_isolation_modes = None -class ManagedOnlineDeployment(OnlineDeploymentProperties): +class ManagedOnlineDeployment(OnlineDeploymentProperties): # pylint: disable=too-many-instance-attributes """Properties specific to a ManagedOnlineDeployment. Variables are only populated by the server, and will be ignored when sending a request. @@ -20604,12 +20453,12 @@ class ManagedOnlineDeployment(OnlineDeploymentProperties): :ivar data_collector: The mdc configuration, we disable mdc when it's null. :vartype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector :ivar egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. 
Possible values include: "Enabled", + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and "Disabled". :vartype egress_public_network_access: str or ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType - :ivar endpoint_compute_type: Required. [Required] The compute type of the endpoint.Constant - filled by server. Possible values include: "Managed", "Kubernetes", "AzureMLCompute". + :ivar endpoint_compute_type: [Required] The compute type of the endpoint. Required. Known + values are: "Managed", "Kubernetes", and "AzureMLCompute". :vartype endpoint_compute_type: str or ~azure.mgmt.machinelearningservices.models.EndpointComputeType :ivar instance_type: Compute instance type. @@ -20620,8 +20469,8 @@ class ManagedOnlineDeployment(OnlineDeploymentProperties): :vartype model: str :ivar model_mount_path: The path to mount the model in custom container. :vartype model_mount_path: str - :ivar provisioning_state: Provisioning state for the endpoint deployment. Possible values - include: "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", "Canceled". + :ivar provisioning_state: Provisioning state for the endpoint deployment. Known values are: + "Creating", "Deleting", "Scaling", "Updating", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.DeploymentProvisioningState :ivar readiness_probe: Readiness probe validates if the container is ready to serve traffic. @@ -20637,50 +20486,50 @@ class ManagedOnlineDeployment(OnlineDeploymentProperties): """ _validation = { - 'endpoint_compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, + "endpoint_compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'code_configuration': {'key': 'codeConfiguration', 'type': 'CodeConfiguration'}, - 'description': {'key': 'description', 'type': 'str'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'app_insights_enabled': {'key': 'appInsightsEnabled', 'type': 'bool'}, - 'data_collector': {'key': 'dataCollector', 'type': 'DataCollector'}, - 'egress_public_network_access': {'key': 'egressPublicNetworkAccess', 'type': 'str'}, - 'endpoint_compute_type': {'key': 'endpointComputeType', 'type': 'str'}, - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'liveness_probe': {'key': 'livenessProbe', 'type': 'ProbeSettings'}, - 'model': {'key': 'model', 'type': 'str'}, - 'model_mount_path': {'key': 'modelMountPath', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'readiness_probe': {'key': 'readinessProbe', 'type': 'ProbeSettings'}, - 'request_settings': {'key': 'requestSettings', 'type': 'OnlineRequestSettings'}, - 'scale_settings': {'key': 'scaleSettings', 'type': 'OnlineScaleSettings'}, + "code_configuration": {"key": "codeConfiguration", "type": "CodeConfiguration"}, + "description": {"key": "description", "type": "str"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "properties": {"key": "properties", "type": "{str}"}, + "app_insights_enabled": {"key": "appInsightsEnabled", "type": "bool"}, + "data_collector": {"key": "dataCollector", "type": "DataCollector"}, + "egress_public_network_access": {"key": "egressPublicNetworkAccess", 
"type": "str"}, + "endpoint_compute_type": {"key": "endpointComputeType", "type": "str"}, + "instance_type": {"key": "instanceType", "type": "str"}, + "liveness_probe": {"key": "livenessProbe", "type": "ProbeSettings"}, + "model": {"key": "model", "type": "str"}, + "model_mount_path": {"key": "modelMountPath", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "readiness_probe": {"key": "readinessProbe", "type": "ProbeSettings"}, + "request_settings": {"key": "requestSettings", "type": "OnlineRequestSettings"}, + "scale_settings": {"key": "scaleSettings", "type": "OnlineScaleSettings"}, } def __init__( self, *, - code_configuration: Optional["CodeConfiguration"] = None, + code_configuration: Optional["_models.CodeConfiguration"] = None, description: Optional[str] = None, environment_id: Optional[str] = None, environment_variables: Optional[Dict[str, str]] = None, properties: Optional[Dict[str, str]] = None, - app_insights_enabled: Optional[bool] = False, - data_collector: Optional["DataCollector"] = None, - egress_public_network_access: Optional[Union[str, "EgressPublicNetworkAccessType"]] = None, + app_insights_enabled: bool = False, + data_collector: Optional["_models.DataCollector"] = None, + egress_public_network_access: Optional[Union[str, "_models.EgressPublicNetworkAccessType"]] = None, instance_type: Optional[str] = None, - liveness_probe: Optional["ProbeSettings"] = None, + liveness_probe: Optional["_models.ProbeSettings"] = None, model: Optional[str] = None, model_mount_path: Optional[str] = None, - readiness_probe: Optional["ProbeSettings"] = None, - request_settings: Optional["OnlineRequestSettings"] = None, - scale_settings: Optional["OnlineScaleSettings"] = None, - **kwargs - ): + readiness_probe: Optional["_models.ProbeSettings"] = None, + request_settings: Optional["_models.OnlineRequestSettings"] = None, + scale_settings: Optional["_models.OnlineScaleSettings"] = None, + **kwargs: Any + ) -> None: """ :keyword code_configuration: Code configuration for the endpoint deployment. :paramtype code_configuration: ~azure.mgmt.machinelearningservices.models.CodeConfiguration @@ -20698,7 +20547,7 @@ def __init__( :keyword data_collector: The mdc configuration, we disable mdc when it's null. :paramtype data_collector: ~azure.mgmt.machinelearningservices.models.DataCollector :keyword egress_public_network_access: If Enabled, allow egress public network access. If - Disabled, this will create secure egress. Default: Enabled. Possible values include: "Enabled", + Disabled, this will create secure egress. Default: Enabled. Known values are: "Enabled" and "Disabled". :paramtype egress_public_network_access: str or ~azure.mgmt.machinelearningservices.models.EgressPublicNetworkAccessType @@ -20721,11 +20570,28 @@ def __init__( and to DefaultScaleSettings for ManagedOnlineDeployment. 
:paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.OnlineScaleSettings """ - super(ManagedOnlineDeployment, self).__init__(code_configuration=code_configuration, description=description, environment_id=environment_id, environment_variables=environment_variables, properties=properties, app_insights_enabled=app_insights_enabled, data_collector=data_collector, egress_public_network_access=egress_public_network_access, instance_type=instance_type, liveness_probe=liveness_probe, model=model, model_mount_path=model_mount_path, readiness_probe=readiness_probe, request_settings=request_settings, scale_settings=scale_settings, **kwargs) - self.endpoint_compute_type = 'Managed' # type: str - - -class ManagedServiceIdentity(msrest.serialization.Model): + super().__init__( + code_configuration=code_configuration, + description=description, + environment_id=environment_id, + environment_variables=environment_variables, + properties=properties, + app_insights_enabled=app_insights_enabled, + data_collector=data_collector, + egress_public_network_access=egress_public_network_access, + instance_type=instance_type, + liveness_probe=liveness_probe, + model=model, + model_mount_path=model_mount_path, + readiness_probe=readiness_probe, + request_settings=request_settings, + scale_settings=scale_settings, + **kwargs + ) + self.endpoint_compute_type: str = "Managed" + + +class ManagedServiceIdentity(_serialization.Model): """Managed service identity (system assigned and/or user assigned identities). Variables are only populated by the server, and will be ignored when sending a request. @@ -20738,9 +20604,9 @@ class ManagedServiceIdentity(msrest.serialization.Model): :ivar tenant_id: The tenant ID of the system assigned identity. This property will only be provided for a system assigned identity. :vartype tenant_id: str - :ivar type: Required. Type of managed service identity (where both SystemAssigned and - UserAssigned types are allowed). Possible values include: "None", "SystemAssigned", - "UserAssigned", "SystemAssigned,UserAssigned". + :ivar type: Type of managed service identity (where both SystemAssigned and UserAssigned types + are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and + "SystemAssigned,UserAssigned". :vartype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType :ivar user_assigned_identities: The set of user assigned identities associated with the resource. 
The userAssignedIdentities dictionary keys will be ARM resource ids in the form: @@ -20751,29 +20617,29 @@ class ManagedServiceIdentity(msrest.serialization.Model): """ _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'type': {'required': True}, + "principal_id": {"readonly": True}, + "tenant_id": {"readonly": True}, + "type": {"required": True}, } _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, + "principal_id": {"key": "principalId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{UserAssignedIdentity}"}, } def __init__( self, *, - type: Union[str, "ManagedServiceIdentityType"], - user_assigned_identities: Optional[Dict[str, "UserAssignedIdentity"]] = None, - **kwargs - ): + type: Union[str, "_models.ManagedServiceIdentityType"], + user_assigned_identities: Optional[Dict[str, "_models.UserAssignedIdentity"]] = None, + **kwargs: Any + ) -> None: """ - :keyword type: Required. Type of managed service identity (where both SystemAssigned and - UserAssigned types are allowed). Possible values include: "None", "SystemAssigned", - "UserAssigned", "SystemAssigned,UserAssigned". + :keyword type: Type of managed service identity (where both SystemAssigned and UserAssigned + types are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and + "SystemAssigned,UserAssigned". :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType :keyword user_assigned_identities: The set of user assigned identities associated with the resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: @@ -20782,14 +20648,14 @@ def __init__( :paramtype user_assigned_identities: dict[str, ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] """ - super(ManagedServiceIdentity, self).__init__(**kwargs) + super().__init__(**kwargs) self.principal_id = None self.tenant_id = None self.type = type self.user_assigned_identities = user_assigned_identities -class MaterializationComputeResource(msrest.serialization.Model): +class MaterializationComputeResource(_serialization.Model): """Dto object representing compute resource. :ivar instance_type: Specifies the instance type. @@ -20797,24 +20663,19 @@ class MaterializationComputeResource(msrest.serialization.Model): """ _attribute_map = { - 'instance_type': {'key': 'instanceType', 'type': 'str'}, + "instance_type": {"key": "instanceType", "type": "str"}, } - def __init__( - self, - *, - instance_type: Optional[str] = None, - **kwargs - ): + def __init__(self, *, instance_type: Optional[str] = None, **kwargs: Any) -> None: """ :keyword instance_type: Specifies the instance type. :paramtype instance_type: str """ - super(MaterializationComputeResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.instance_type = instance_type -class MaterializationSettings(msrest.serialization.Model): +class MaterializationSettings(_serialization.Model): """MaterializationSettings. :ivar notification: Specifies the notification details. 
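# --- Editorial illustration, not part of the generated patch -----------------
# ManagedServiceIdentity keeps `type` as its only required keyword; principal_id
# and tenant_id are readonly and stay None until returned by the service.
# UserAssignedIdentity is assumed to be constructible with no arguments, since
# its fields are server-populated.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

msi = _models.ManagedServiceIdentity(
    type="SystemAssigned,UserAssigned",
    user_assigned_identities={
        # hypothetical user-assigned identity resource id
        "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg/"
        "providers/Microsoft.ManagedIdentity/userAssignedIdentities/my-uai": _models.UserAssignedIdentity(),
    },
)
assert msi.principal_id is None and msi.tenant_id is None
# -----------------------------------------------------------------------------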
@@ -20825,29 +20686,29 @@ class MaterializationSettings(msrest.serialization.Model): :vartype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceTrigger :ivar spark_configuration: Specifies the spark compute settings. :vartype spark_configuration: dict[str, str] - :ivar store_type: Specifies the stores to which materialization should happen. Possible values - include: "None", "Online", "Offline", "OnlineAndOffline". + :ivar store_type: Specifies the stores to which materialization should happen. Known values + are: "None", "Online", "Offline", and "OnlineAndOffline". :vartype store_type: str or ~azure.mgmt.machinelearningservices.models.MaterializationStoreType """ _attribute_map = { - 'notification': {'key': 'notification', 'type': 'NotificationSetting'}, - 'resource': {'key': 'resource', 'type': 'MaterializationComputeResource'}, - 'schedule': {'key': 'schedule', 'type': 'RecurrenceTrigger'}, - 'spark_configuration': {'key': 'sparkConfiguration', 'type': '{str}'}, - 'store_type': {'key': 'storeType', 'type': 'str'}, + "notification": {"key": "notification", "type": "NotificationSetting"}, + "resource": {"key": "resource", "type": "MaterializationComputeResource"}, + "schedule": {"key": "schedule", "type": "RecurrenceTrigger"}, + "spark_configuration": {"key": "sparkConfiguration", "type": "{str}"}, + "store_type": {"key": "storeType", "type": "str"}, } def __init__( self, *, - notification: Optional["NotificationSetting"] = None, - resource: Optional["MaterializationComputeResource"] = None, - schedule: Optional["RecurrenceTrigger"] = None, + notification: Optional["_models.NotificationSetting"] = None, + resource: Optional["_models.MaterializationComputeResource"] = None, + schedule: Optional["_models.RecurrenceTrigger"] = None, spark_configuration: Optional[Dict[str, str]] = None, - store_type: Optional[Union[str, "MaterializationStoreType"]] = None, - **kwargs - ): + store_type: Optional[Union[str, "_models.MaterializationStoreType"]] = None, + **kwargs: Any + ) -> None: """ :keyword notification: Specifies the notification details. :paramtype notification: ~azure.mgmt.machinelearningservices.models.NotificationSetting @@ -20857,12 +20718,12 @@ def __init__( :paramtype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceTrigger :keyword spark_configuration: Specifies the spark compute settings. :paramtype spark_configuration: dict[str, str] - :keyword store_type: Specifies the stores to which materialization should happen. Possible - values include: "None", "Online", "Offline", "OnlineAndOffline". + :keyword store_type: Specifies the stores to which materialization should happen. Known values + are: "None", "Online", "Offline", and "OnlineAndOffline". :paramtype store_type: str or ~azure.mgmt.machinelearningservices.models.MaterializationStoreType """ - super(MaterializationSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.notification = notification self.resource = resource self.schedule = schedule @@ -20871,7 +20732,8 @@ def __init__( class MedianStoppingPolicy(EarlyTerminationPolicy): - """Defines an early termination policy based on running averages of the primary metric of all runs. + """Defines an early termination policy based on running averages of the primary metric of all + runs. All required parameters must be populated in order to send to Azure. @@ -20879,72 +20741,62 @@ class MedianStoppingPolicy(EarlyTerminationPolicy): :vartype delay_evaluation: int :ivar evaluation_interval: Interval (number of runs) between policy evaluations. 
:vartype evaluation_interval: int - :ivar policy_type: Required. [Required] Name of policy configuration.Constant filled by server. - Possible values include: "Bandit", "MedianStopping", "TruncationSelection". + :ivar policy_type: [Required] Name of policy configuration. Required. Known values are: + "Bandit", "MedianStopping", and "TruncationSelection". :vartype policy_type: str or ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType """ _validation = { - 'policy_type': {'required': True}, + "policy_type": {"required": True}, } _attribute_map = { - 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, - 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, - 'policy_type': {'key': 'policyType', 'type': 'str'}, + "delay_evaluation": {"key": "delayEvaluation", "type": "int"}, + "evaluation_interval": {"key": "evaluationInterval", "type": "int"}, + "policy_type": {"key": "policyType", "type": "str"}, } - def __init__( - self, - *, - delay_evaluation: Optional[int] = 0, - evaluation_interval: Optional[int] = 0, - **kwargs - ): + def __init__(self, *, delay_evaluation: int = 0, evaluation_interval: int = 0, **kwargs: Any) -> None: """ :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. :paramtype delay_evaluation: int :keyword evaluation_interval: Interval (number of runs) between policy evaluations. :paramtype evaluation_interval: int """ - super(MedianStoppingPolicy, self).__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) - self.policy_type = 'MedianStopping' # type: str + super().__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) + self.policy_type: str = "MedianStopping" -class MLAssistConfiguration(msrest.serialization.Model): +class MLAssistConfiguration(_serialization.Model): """Labeling MLAssist configuration definition. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: MLAssistConfigurationDisabled, MLAssistConfigurationEnabled. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + MLAssistConfigurationDisabled, MLAssistConfigurationEnabled All required parameters must be populated in order to send to Azure. - :ivar ml_assist: Required. [Required] Indicates whether MLAssist feature is enabled.Constant - filled by server. Possible values include: "Enabled", "Disabled". + :ivar ml_assist: [Required] Indicates whether MLAssist feature is enabled. Required. Known + values are: "Enabled" and "Disabled". 
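# --- Editorial illustration, not part of the generated patch -----------------
# MedianStoppingPolicy now declares its defaults directly in the signature
# (delay_evaluation: int = 0, evaluation_interval: int = 0 instead of
# Optional[int] = 0); the policy_type discriminator remains a constant.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

policy = _models.MedianStoppingPolicy(delay_evaluation=5, evaluation_interval=1)
assert policy.policy_type == "MedianStopping"
# -----------------------------------------------------------------------------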
:vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType """ _validation = { - 'ml_assist': {'required': True}, + "ml_assist": {"required": True}, } _attribute_map = { - 'ml_assist': {'key': 'mlAssist', 'type': 'str'}, + "ml_assist": {"key": "mlAssist", "type": "str"}, } _subtype_map = { - 'ml_assist': {'Disabled': 'MLAssistConfigurationDisabled', 'Enabled': 'MLAssistConfigurationEnabled'} + "ml_assist": {"Disabled": "MLAssistConfigurationDisabled", "Enabled": "MLAssistConfigurationEnabled"} } - def __init__( - self, - **kwargs - ): - """ - """ - super(MLAssistConfiguration, self).__init__(**kwargs) - self.ml_assist = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.ml_assist: Optional[str] = None class MLAssistConfigurationDisabled(MLAssistConfiguration): @@ -20952,27 +20804,23 @@ class MLAssistConfigurationDisabled(MLAssistConfiguration): All required parameters must be populated in order to send to Azure. - :ivar ml_assist: Required. [Required] Indicates whether MLAssist feature is enabled.Constant - filled by server. Possible values include: "Enabled", "Disabled". + :ivar ml_assist: [Required] Indicates whether MLAssist feature is enabled. Required. Known + values are: "Enabled" and "Disabled". :vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType """ _validation = { - 'ml_assist': {'required': True}, + "ml_assist": {"required": True}, } _attribute_map = { - 'ml_assist': {'key': 'mlAssist', 'type': 'str'}, + "ml_assist": {"key": "mlAssist", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(MLAssistConfigurationDisabled, self).__init__(**kwargs) - self.ml_assist = 'Disabled' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.ml_assist: str = "Disabled" class MLAssistConfigurationEnabled(MLAssistConfiguration): @@ -20980,177 +20828,178 @@ class MLAssistConfigurationEnabled(MLAssistConfiguration): All required parameters must be populated in order to send to Azure. - :ivar ml_assist: Required. [Required] Indicates whether MLAssist feature is enabled.Constant - filled by server. Possible values include: "Enabled", "Disabled". + :ivar ml_assist: [Required] Indicates whether MLAssist feature is enabled. Required. Known + values are: "Enabled" and "Disabled". :vartype ml_assist: str or ~azure.mgmt.machinelearningservices.models.MLAssistConfigurationType - :ivar inferencing_compute_binding: Required. [Required] AML compute binding used in - inferencing. + :ivar inferencing_compute_binding: [Required] AML compute binding used in inferencing. + Required. :vartype inferencing_compute_binding: str - :ivar training_compute_binding: Required. [Required] AML compute binding used in training. + :ivar training_compute_binding: [Required] AML compute binding used in training. Required. 
:vartype training_compute_binding: str """ _validation = { - 'ml_assist': {'required': True}, - 'inferencing_compute_binding': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'training_compute_binding': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "ml_assist": {"required": True}, + "inferencing_compute_binding": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "training_compute_binding": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'ml_assist': {'key': 'mlAssist', 'type': 'str'}, - 'inferencing_compute_binding': {'key': 'inferencingComputeBinding', 'type': 'str'}, - 'training_compute_binding': {'key': 'trainingComputeBinding', 'type': 'str'}, + "ml_assist": {"key": "mlAssist", "type": "str"}, + "inferencing_compute_binding": {"key": "inferencingComputeBinding", "type": "str"}, + "training_compute_binding": {"key": "trainingComputeBinding", "type": "str"}, } - def __init__( - self, - *, - inferencing_compute_binding: str, - training_compute_binding: str, - **kwargs - ): + def __init__(self, *, inferencing_compute_binding: str, training_compute_binding: str, **kwargs: Any) -> None: """ - :keyword inferencing_compute_binding: Required. [Required] AML compute binding used in - inferencing. + :keyword inferencing_compute_binding: [Required] AML compute binding used in inferencing. + Required. :paramtype inferencing_compute_binding: str - :keyword training_compute_binding: Required. [Required] AML compute binding used in training. + :keyword training_compute_binding: [Required] AML compute binding used in training. Required. :paramtype training_compute_binding: str """ - super(MLAssistConfigurationEnabled, self).__init__(**kwargs) - self.ml_assist = 'Enabled' # type: str + super().__init__(**kwargs) + self.ml_assist: str = "Enabled" self.inferencing_compute_binding = inferencing_compute_binding self.training_compute_binding = training_compute_binding -class MLFlowModelJobInput(JobInput, AssetJobInput): +class MLFlowModelJobInput(AssetJobInput, JobInput): """MLFlowModelJobInput. All required parameters must be populated in order to send to Azure. - :ivar mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str :ivar description: Description for the input. :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. 
+ :vartype uri: str """ _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, uri: str, - mode: Optional[Union[str, "InputDeliveryMode"]] = None, description: Optional[str] = None, - **kwargs - ): + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + **kwargs: Any + ) -> None: """ - :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str :keyword description: Description for the input. :paramtype description: str + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str """ - super(MLFlowModelJobInput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_input_type: str = "mlflow_model" self.mode = mode self.uri = uri - self.job_input_type = 'mlflow_model' # type: str - self.description = description -class MLFlowModelJobOutput(JobOutput, AssetJobOutput): +class MLFlowModelJobOutput(AssetJobOutput, JobOutput): """MLFlowModelJobOutput. All required parameters must be populated in order to send to Azure. + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType :ivar asset_name: Output Asset Name. :vartype asset_name: str :ivar asset_version: Output Asset Version. :vartype asset_version: str :ivar auto_delete_setting: Auto delete setting of output data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and "Direct". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :ivar uri: Output Asset URI. :vartype uri: str - :ivar description: Description for the output. - :vartype description: str - :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by - server. Possible values include: "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". 
- :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType """ _validation = { - 'job_output_type': {'required': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, + description: Optional[str] = None, asset_name: Optional[str] = None, asset_version: Optional[str] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - mode: Optional[Union[str, "OutputDeliveryMode"]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, uri: Optional[str] = None, - description: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ + :keyword description: Description for the output. + :paramtype description: str :keyword asset_name: Output Asset Name. :paramtype asset_name: str :keyword asset_version: Output Asset Version. :paramtype asset_version: str :keyword auto_delete_setting: Auto delete setting of output data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and "Direct". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :keyword uri: Output Asset URI. :paramtype uri: str - :keyword description: Description for the output. - :paramtype description: str """ - super(MLFlowModelJobOutput, self).__init__(description=description, asset_name=asset_name, asset_version=asset_version, auto_delete_setting=auto_delete_setting, mode=mode, uri=uri, **kwargs) + super().__init__( + asset_name=asset_name, + asset_version=asset_version, + auto_delete_setting=auto_delete_setting, + mode=mode, + uri=uri, + description=description, + **kwargs + ) + self.description = description + self.job_output_type: str = "mlflow_model" self.asset_name = asset_name self.asset_version = asset_version self.auto_delete_setting = auto_delete_setting self.mode = mode self.uri = uri - self.job_output_type = 'mlflow_model' # type: str - self.description = description -class MLTableData(DataVersionBaseProperties): +class MLTableData(DataVersionBaseProperties): # pylint: disable=too-many-instance-attributes """MLTable data definition. All required parameters must be populated in order to send to Azure. @@ -21159,7 +21008,7 @@ class MLTableData(DataVersionBaseProperties): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. 
Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -21169,11 +21018,11 @@ class MLTableData(DataVersionBaseProperties): :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is provided it will be used to populate IsArchived. :vartype is_archived: bool - :ivar data_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "uri_file", "uri_folder", "mltable". + :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", + "uri_folder", and "mltable". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType - :ivar data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. + :ivar data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. :vartype data_uri: str :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual Property. @@ -21185,22 +21034,22 @@ class MLTableData(DataVersionBaseProperties): """ _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "data_type": {"required": True}, + "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 'stage', 'type': 'str'}, - 'referenced_uris': {'key': 'referencedUris', 'type': '[str]'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, + "referenced_uris": {"key": "referencedUris", "type": "[str]"}, } def __init__( @@ -21210,20 +21059,20 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - is_anonymous: Optional[bool] = False, - is_archived: Optional[bool] = False, - intellectual_property: Optional["IntellectualProperty"] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + intellectual_property: Optional["_models.IntellectualProperty"] = None, stage: Optional[str] = None, referenced_uris: Optional[List[str]] = None, - **kwargs - ): + **kwargs: Any + ) -> 
None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -21233,8 +21082,8 @@ def __init__( :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is provided it will be used to populate IsArchived. :paramtype is_archived: bool - :keyword data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. + :keyword data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. :paramtype data_uri: str :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual Property. @@ -21245,167 +21094,185 @@ def __init__( :keyword referenced_uris: Uris referenced in the MLTable definition (required for lineage). :paramtype referenced_uris: list[str] """ - super(MLTableData, self).__init__(description=description, properties=properties, tags=tags, auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, data_uri=data_uri, intellectual_property=intellectual_property, stage=stage, **kwargs) - self.data_type = 'mltable' # type: str + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + data_uri=data_uri, + intellectual_property=intellectual_property, + stage=stage, + **kwargs + ) + self.data_type: str = "mltable" self.referenced_uris = referenced_uris -class MLTableJobInput(JobInput, AssetJobInput): +class MLTableJobInput(AssetJobInput, JobInput): """MLTableJobInput. All required parameters must be populated in order to send to Azure. - :ivar mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str :ivar description: Description for the input. :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. 
+ :vartype uri: str """ _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, uri: str, - mode: Optional[Union[str, "InputDeliveryMode"]] = None, description: Optional[str] = None, - **kwargs - ): + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + **kwargs: Any + ) -> None: """ - :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str :keyword description: Description for the input. :paramtype description: str + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str """ - super(MLTableJobInput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_input_type: str = "mltable" self.mode = mode self.uri = uri - self.job_input_type = 'mltable' # type: str - self.description = description -class MLTableJobOutput(JobOutput, AssetJobOutput): +class MLTableJobOutput(AssetJobOutput, JobOutput): """MLTableJobOutput. All required parameters must be populated in order to send to Azure. + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType :ivar asset_name: Output Asset Name. :vartype asset_name: str :ivar asset_version: Output Asset Version. :vartype asset_version: str :ivar auto_delete_setting: Auto delete setting of output data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and "Direct". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :ivar uri: Output Asset URI. :vartype uri: str - :ivar description: Description for the output. - :vartype description: str - :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by - server. Possible values include: "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". 
- :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType """ _validation = { - 'job_output_type': {'required': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, + description: Optional[str] = None, asset_name: Optional[str] = None, asset_version: Optional[str] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - mode: Optional[Union[str, "OutputDeliveryMode"]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, uri: Optional[str] = None, - description: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ + :keyword description: Description for the output. + :paramtype description: str :keyword asset_name: Output Asset Name. :paramtype asset_name: str :keyword asset_version: Output Asset Version. :paramtype asset_version: str :keyword auto_delete_setting: Auto delete setting of output data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and "Direct". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :keyword uri: Output Asset URI. :paramtype uri: str - :keyword description: Description for the output. - :paramtype description: str """ - super(MLTableJobOutput, self).__init__(description=description, asset_name=asset_name, asset_version=asset_version, auto_delete_setting=auto_delete_setting, mode=mode, uri=uri, **kwargs) + super().__init__( + asset_name=asset_name, + asset_version=asset_version, + auto_delete_setting=auto_delete_setting, + mode=mode, + uri=uri, + description=description, + **kwargs + ) + self.description = description + self.job_output_type: str = "mltable" self.asset_name = asset_name self.asset_version = asset_version self.auto_delete_setting = auto_delete_setting self.mode = mode self.uri = uri - self.job_output_type = 'mltable' # type: str - self.description = description -class ModelConfiguration(msrest.serialization.Model): +class ModelConfiguration(_serialization.Model): """Model configuration options. - :ivar mode: Input delivery mode for the model. Possible values include: "Copy", "Download". + :ivar mode: Input delivery mode for the model. Known values are: "Copy" and "Download". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.PackageInputDeliveryMode :ivar mount_path: Relative mounting path of the model in the target image. 
:vartype mount_path: str """ _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'mount_path': {'key': 'mountPath', 'type': 'str'}, + "mode": {"key": "mode", "type": "str"}, + "mount_path": {"key": "mountPath", "type": "str"}, } def __init__( self, *, - mode: Optional[Union[str, "PackageInputDeliveryMode"]] = None, + mode: Optional[Union[str, "_models.PackageInputDeliveryMode"]] = None, mount_path: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword mode: Input delivery mode for the model. Possible values include: "Copy", "Download". + :keyword mode: Input delivery mode for the model. Known values are: "Copy" and "Download". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.PackageInputDeliveryMode :keyword mount_path: Relative mounting path of the model in the target image. :paramtype mount_path: str """ - super(ModelConfiguration, self).__init__(**kwargs) + super().__init__(**kwargs) self.mode = mode self.mount_path = mount_path @@ -21428,37 +21295,32 @@ class ModelContainer(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.ModelContainerProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ModelContainerProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ModelContainerProperties"}, } - def __init__( - self, - *, - properties: "ModelContainerProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.ModelContainerProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.ModelContainerProperties """ - super(ModelContainer, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties @@ -21471,7 +21333,7 @@ class ModelContainerProperties(AssetContainer): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar is_archived: Is the asset archived?. :vartype is_archived: bool @@ -21479,26 +21341,26 @@ class ModelContainerProperties(AssetContainer): :vartype latest_version: str :ivar next_version: The next auto incremental version. 
:vartype next_version: str - :ivar provisioning_state: Provisioning state for the model container. Possible values include: - "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". + :ivar provisioning_state: Provisioning state for the model container. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.AssetProvisioningState """ _validation = { - 'latest_version': {'readonly': True}, - 'next_version': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "latest_version": {"readonly": True}, + "next_version": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'latest_version': {'key': 'latestVersion', 'type': 'str'}, - 'next_version': {'key': 'nextVersion', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "latest_version": {"key": "latestVersion", "type": "str"}, + "next_version": {"key": "nextVersion", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } def __init__( @@ -21507,24 +21369,24 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - is_archived: Optional[bool] = False, - **kwargs - ): + is_archived: bool = False, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword is_archived: Is the asset archived?. :paramtype is_archived: bool """ - super(ModelContainerProperties, self).__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) + super().__init__(description=description, properties=properties, tags=tags, is_archived=is_archived, **kwargs) self.provisioning_state = None -class ModelContainerResourceArmPaginatedResult(msrest.serialization.Model): +class ModelContainerResourceArmPaginatedResult(_serialization.Model): """A paginated list of ModelContainer entities. :ivar next_link: The link to the next page of ModelContainer objects. If null, there are no @@ -21535,17 +21397,13 @@ class ModelContainerResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[ModelContainer]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ModelContainer]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["ModelContainer"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.ModelContainer"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of ModelContainer objects. 
If null, there are no additional pages. @@ -21553,60 +21411,60 @@ def __init__( :keyword value: An array of objects of type ModelContainer. :paramtype value: list[~azure.mgmt.machinelearningservices.models.ModelContainer] """ - super(ModelContainerResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class ModelPackageInput(msrest.serialization.Model): +class ModelPackageInput(_serialization.Model): """Model package input options. All required parameters must be populated in order to send to Azure. - :ivar input_type: Required. [Required] Type of the input included in the target image. Possible - values include: "UriFile", "UriFolder". + :ivar input_type: [Required] Type of the input included in the target image. Required. Known + values are: "UriFile" and "UriFolder". :vartype input_type: str or ~azure.mgmt.machinelearningservices.models.PackageInputType - :ivar mode: Input delivery mode of the input. Possible values include: "Copy", "Download". + :ivar mode: Input delivery mode of the input. Known values are: "Copy" and "Download". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.PackageInputDeliveryMode :ivar mount_path: Relative mount path of the input in the target image. :vartype mount_path: str - :ivar path: Required. [Required] Location of the input. + :ivar path: [Required] Location of the input. Required. :vartype path: ~azure.mgmt.machinelearningservices.models.PackageInputPathBase """ _validation = { - 'input_type': {'required': True}, - 'path': {'required': True}, + "input_type": {"required": True}, + "path": {"required": True}, } _attribute_map = { - 'input_type': {'key': 'inputType', 'type': 'str'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'mount_path': {'key': 'mountPath', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'PackageInputPathBase'}, + "input_type": {"key": "inputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "mount_path": {"key": "mountPath", "type": "str"}, + "path": {"key": "path", "type": "PackageInputPathBase"}, } def __init__( self, *, - input_type: Union[str, "PackageInputType"], - path: "PackageInputPathBase", - mode: Optional[Union[str, "PackageInputDeliveryMode"]] = None, + input_type: Union[str, "_models.PackageInputType"], + path: "_models.PackageInputPathBase", + mode: Optional[Union[str, "_models.PackageInputDeliveryMode"]] = None, mount_path: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword input_type: Required. [Required] Type of the input included in the target image. - Possible values include: "UriFile", "UriFolder". + :keyword input_type: [Required] Type of the input included in the target image. Required. Known + values are: "UriFile" and "UriFolder". :paramtype input_type: str or ~azure.mgmt.machinelearningservices.models.PackageInputType - :keyword mode: Input delivery mode of the input. Possible values include: "Copy", "Download". + :keyword mode: Input delivery mode of the input. Known values are: "Copy" and "Download". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.PackageInputDeliveryMode :keyword mount_path: Relative mount path of the input in the target image. :paramtype mount_path: str - :keyword path: Required. [Required] Location of the input. + :keyword path: [Required] Location of the input. Required. 
:paramtype path: ~azure.mgmt.machinelearningservices.models.PackageInputPathBase """ - super(ModelPackageInput, self).__init__(**kwargs) + super().__init__(**kwargs) self.input_type = input_type self.mode = mode self.mount_path = mount_path @@ -21618,113 +21476,107 @@ class ModelPerformanceSignal(MonitoringSignalBase): All required parameters must be populated in order to send to Azure. - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and "Enabled". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :ivar properties: Property dictionary. Properties can be added, but not removed or altered. :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType :ivar data_segment: The data segment. :vartype data_segment: ~azure.mgmt.machinelearningservices.models.MonitoringDataSegment - :ivar metric_threshold: Required. [Required] A list of metrics to calculate and their - associated thresholds. + :ivar metric_threshold: [Required] A list of metrics to calculate and their associated + thresholds. Required. :vartype metric_threshold: ~azure.mgmt.machinelearningservices.models.ModelPerformanceMetricThresholdBase - :ivar production_data: Required. [Required] The data produced by the production service which - drift will be calculated for. + :ivar production_data: [Required] The data produced by the production service which drift will + be calculated for. Required. :vartype production_data: list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] - :ivar reference_data: Required. [Required] The data to calculate drift against. + :ivar reference_data: [Required] The data to calculate drift against. Required. 
:vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ _validation = { - 'signal_type': {'required': True}, - 'metric_threshold': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, + "signal_type": {"required": True}, + "metric_threshold": {"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'data_segment': {'key': 'dataSegment', 'type': 'MonitoringDataSegment'}, - 'metric_threshold': {'key': 'metricThreshold', 'type': 'ModelPerformanceMetricThresholdBase'}, - 'production_data': {'key': 'productionData', 'type': '[MonitoringInputDataBase]'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "data_segment": {"key": "dataSegment", "type": "MonitoringDataSegment"}, + "metric_threshold": {"key": "metricThreshold", "type": "ModelPerformanceMetricThresholdBase"}, + "production_data": {"key": "productionData", "type": "[MonitoringInputDataBase]"}, + "reference_data": {"key": "referenceData", "type": "MonitoringInputDataBase"}, } def __init__( self, *, - metric_threshold: "ModelPerformanceMetricThresholdBase", - production_data: List["MonitoringInputDataBase"], - reference_data: "MonitoringInputDataBase", - mode: Optional[Union[str, "MonitoringNotificationMode"]] = None, + metric_threshold: "_models.ModelPerformanceMetricThresholdBase", + production_data: List["_models.MonitoringInputDataBase"], + reference_data: "_models.MonitoringInputDataBase", + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, properties: Optional[Dict[str, str]] = None, - data_segment: Optional["MonitoringDataSegment"] = None, - **kwargs - ): + data_segment: Optional["_models.MonitoringDataSegment"] = None, + **kwargs: Any + ) -> None: """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :keyword properties: Property dictionary. Properties can be added, but not removed or altered. :paramtype properties: dict[str, str] :keyword data_segment: The data segment. :paramtype data_segment: ~azure.mgmt.machinelearningservices.models.MonitoringDataSegment - :keyword metric_threshold: Required. [Required] A list of metrics to calculate and their - associated thresholds. + :keyword metric_threshold: [Required] A list of metrics to calculate and their associated + thresholds. Required. :paramtype metric_threshold: ~azure.mgmt.machinelearningservices.models.ModelPerformanceMetricThresholdBase - :keyword production_data: Required. [Required] The data produced by the production service - which drift will be calculated for. + :keyword production_data: [Required] The data produced by the production service which drift + will be calculated for. Required. :paramtype production_data: list[~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase] - :keyword reference_data: Required. [Required] The data to calculate drift against. 
+ :keyword reference_data: [Required] The data to calculate drift against. Required. :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ - super(ModelPerformanceSignal, self).__init__(mode=mode, properties=properties, **kwargs) - self.signal_type = 'ModelPerformance' # type: str + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "ModelPerformance" self.data_segment = data_segment self.metric_threshold = metric_threshold self.production_data = production_data self.reference_data = reference_data -class ModelProfile(msrest.serialization.Model): +class ModelProfile(_serialization.Model): """ModelProfile. All required parameters must be populated in order to send to Azure. - :ivar model_uri: Required. [Required] The model to create a serverless endpoint of. + :ivar model_uri: [Required] The model to create a serverless endpoint of. Required. :vartype model_uri: str """ _validation = { - 'model_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "model_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'model_uri': {'key': 'modelUri', 'type': 'str'}, + "model_uri": {"key": "modelUri", "type": "str"}, } - def __init__( - self, - *, - model_uri: str, - **kwargs - ): + def __init__(self, *, model_uri: str, **kwargs: Any) -> None: """ - :keyword model_uri: Required. [Required] The model to create a serverless endpoint of. + :keyword model_uri: [Required] The model to create a serverless endpoint of. Required. :paramtype model_uri: str """ - super(ModelProfile, self).__init__(**kwargs) + super().__init__(**kwargs) self.model_uri = model_uri @@ -21746,41 +21598,36 @@ class ModelVersion(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.ModelVersionProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ModelVersionProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ModelVersionProperties"}, } - def __init__( - self, - *, - properties: "ModelVersionProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.ModelVersionProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.ModelVersionProperties """ - super(ModelVersion, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class ModelVersionProperties(AssetBase): +class ModelVersionProperties(AssetBase): # pylint: disable=too-many-instance-attributes """Model asset version details. Variables are only populated by the server, and will be ignored when sending a request. @@ -21789,7 +21636,7 @@ class ModelVersionProperties(AssetBase): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -21810,8 +21657,8 @@ class ModelVersionProperties(AssetBase): :vartype model_type: str :ivar model_uri: The URI path to the model contents. :vartype model_uri: str - :ivar provisioning_state: Provisioning state for the model version. Possible values include: - "Succeeded", "Failed", "Canceled", "Creating", "Updating", "Deleting". + :ivar provisioning_state: Provisioning state for the model version. Known values are: + "Succeeded", "Failed", "Canceled", "Creating", "Updating", and "Deleting". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.AssetProvisioningState :ivar stage: Stage in the model lifecycle assigned to this model. @@ -21819,23 +21666,23 @@ class ModelVersionProperties(AssetBase): """ _validation = { - 'provisioning_state': {'readonly': True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'flavors': {'key': 'flavors', 'type': '{FlavorData}'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'job_name': {'key': 'jobName', 'type': 'str'}, - 'model_type': {'key': 'modelType', 'type': 'str'}, - 'model_uri': {'key': 'modelUri', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "flavors": {"key": "flavors", "type": "{FlavorData}"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "job_name": {"key": "jobName", "type": "str"}, + "model_type": {"key": "modelType", "type": "str"}, + "model_uri": {"key": "modelUri", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "stage": {"key": "stage", "type": "str"}, } def __init__( @@ -21844,23 +21691,23 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] 
= None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - is_anonymous: Optional[bool] = False, - is_archived: Optional[bool] = False, - flavors: Optional[Dict[str, "FlavorData"]] = None, - intellectual_property: Optional["IntellectualProperty"] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + flavors: Optional[Dict[str, "_models.FlavorData"]] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, job_name: Optional[str] = None, model_type: Optional[str] = None, model_uri: Optional[str] = None, stage: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -21885,7 +21732,15 @@ def __init__( :keyword stage: Stage in the model lifecycle assigned to this model. :paramtype stage: str """ - super(ModelVersionProperties, self).__init__(description=description, properties=properties, tags=tags, auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, **kwargs) + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + **kwargs + ) self.flavors = flavors self.intellectual_property = intellectual_property self.job_name = job_name @@ -21895,7 +21750,7 @@ def __init__( self.stage = stage -class ModelVersionResourceArmPaginatedResult(msrest.serialization.Model): +class ModelVersionResourceArmPaginatedResult(_serialization.Model): """A paginated list of ModelVersion entities. :ivar next_link: The link to the next page of ModelVersion objects. If null, there are no @@ -21906,17 +21761,13 @@ class ModelVersionResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[ModelVersion]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ModelVersion]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["ModelVersion"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.ModelVersion"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of ModelVersion objects. If null, there are no additional pages. @@ -21924,47 +21775,41 @@ def __init__( :keyword value: An array of objects of type ModelVersion. :paramtype value: list[~azure.mgmt.machinelearningservices.models.ModelVersion] """ - super(ModelVersionResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class MonitorComputeConfigurationBase(msrest.serialization.Model): +class MonitorComputeConfigurationBase(_serialization.Model): """Monitor compute configuration base definition. - You probably want to use the sub-classes and not this class directly. 
Known - sub-classes are: MonitorServerlessSparkCompute. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + MonitorServerlessSparkCompute All required parameters must be populated in order to send to Azure. - :ivar compute_type: Required. [Required] Specifies the type of signal to monitor.Constant - filled by server. Possible values include: "ServerlessSpark". + :ivar compute_type: [Required] Specifies the type of signal to monitor. Required. + "ServerlessSpark" :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.MonitorComputeType """ _validation = { - 'compute_type': {'required': True}, + "compute_type": {"required": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, + "compute_type": {"key": "computeType", "type": "str"}, } - _subtype_map = { - 'compute_type': {'ServerlessSpark': 'MonitorServerlessSparkCompute'} - } + _subtype_map = {"compute_type": {"ServerlessSpark": "MonitorServerlessSparkCompute"}} - def __init__( - self, - **kwargs - ): - """ - """ - super(MonitorComputeConfigurationBase, self).__init__(**kwargs) - self.compute_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.compute_type: Optional[str] = None -class MonitorDefinition(msrest.serialization.Model): +class MonitorDefinition(_serialization.Model): """MonitorDefinition. All required parameters must be populated in order to send to Azure. @@ -21972,60 +21817,63 @@ class MonitorDefinition(msrest.serialization.Model): :ivar alert_notification_setting: The monitor's notification settings. :vartype alert_notification_setting: ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationSettingsBase - :ivar compute_configuration: Required. [Required] The ARM resource ID of the compute resource - to run the monitoring job on. + :ivar compute_configuration: [Required] The ARM resource ID of the compute resource to run the + monitoring job on. Required. :vartype compute_configuration: ~azure.mgmt.machinelearningservices.models.MonitorComputeConfigurationBase :ivar monitoring_target: The ARM resource ID of either the model or deployment targeted by this monitor. :vartype monitoring_target: ~azure.mgmt.machinelearningservices.models.MonitoringTarget - :ivar signals: Required. [Required] The signals to monitor. + :ivar signals: [Required] The signals to monitor. Required. 
:vartype signals: dict[str, ~azure.mgmt.machinelearningservices.models.MonitoringSignalBase] """ _validation = { - 'compute_configuration': {'required': True}, - 'signals': {'required': True}, + "compute_configuration": {"required": True}, + "signals": {"required": True}, } _attribute_map = { - 'alert_notification_setting': {'key': 'alertNotificationSetting', 'type': 'MonitoringAlertNotificationSettingsBase'}, - 'compute_configuration': {'key': 'computeConfiguration', 'type': 'MonitorComputeConfigurationBase'}, - 'monitoring_target': {'key': 'monitoringTarget', 'type': 'MonitoringTarget'}, - 'signals': {'key': 'signals', 'type': '{MonitoringSignalBase}'}, + "alert_notification_setting": { + "key": "alertNotificationSetting", + "type": "MonitoringAlertNotificationSettingsBase", + }, + "compute_configuration": {"key": "computeConfiguration", "type": "MonitorComputeConfigurationBase"}, + "monitoring_target": {"key": "monitoringTarget", "type": "MonitoringTarget"}, + "signals": {"key": "signals", "type": "{MonitoringSignalBase}"}, } def __init__( self, *, - compute_configuration: "MonitorComputeConfigurationBase", - signals: Dict[str, "MonitoringSignalBase"], - alert_notification_setting: Optional["MonitoringAlertNotificationSettingsBase"] = None, - monitoring_target: Optional["MonitoringTarget"] = None, - **kwargs - ): + compute_configuration: "_models.MonitorComputeConfigurationBase", + signals: Dict[str, "_models.MonitoringSignalBase"], + alert_notification_setting: Optional["_models.MonitoringAlertNotificationSettingsBase"] = None, + monitoring_target: Optional["_models.MonitoringTarget"] = None, + **kwargs: Any + ) -> None: """ :keyword alert_notification_setting: The monitor's notification settings. :paramtype alert_notification_setting: ~azure.mgmt.machinelearningservices.models.MonitoringAlertNotificationSettingsBase - :keyword compute_configuration: Required. [Required] The ARM resource ID of the compute - resource to run the monitoring job on. + :keyword compute_configuration: [Required] The ARM resource ID of the compute resource to run + the monitoring job on. Required. :paramtype compute_configuration: ~azure.mgmt.machinelearningservices.models.MonitorComputeConfigurationBase :keyword monitoring_target: The ARM resource ID of either the model or deployment targeted by this monitor. :paramtype monitoring_target: ~azure.mgmt.machinelearningservices.models.MonitoringTarget - :keyword signals: Required. [Required] The signals to monitor. + :keyword signals: [Required] The signals to monitor. Required. :paramtype signals: dict[str, ~azure.mgmt.machinelearningservices.models.MonitoringSignalBase] """ - super(MonitorDefinition, self).__init__(**kwargs) + super().__init__(**kwargs) self.alert_notification_setting = alert_notification_setting self.compute_configuration = compute_configuration self.monitoring_target = monitoring_target self.signals = signals -class MonitoringDataSegment(msrest.serialization.Model): +class MonitoringDataSegment(_serialization.Model): """MonitoringDataSegment. :ivar feature: The feature to segment the data on. 
@@ -22035,29 +21883,23 @@ class MonitoringDataSegment(msrest.serialization.Model): """ _attribute_map = { - 'feature': {'key': 'feature', 'type': 'str'}, - 'values': {'key': 'values', 'type': '[str]'}, + "feature": {"key": "feature", "type": "str"}, + "values": {"key": "values", "type": "[str]"}, } - def __init__( - self, - *, - feature: Optional[str] = None, - values: Optional[List[str]] = None, - **kwargs - ): + def __init__(self, *, feature: Optional[str] = None, values: Optional[List[str]] = None, **kwargs: Any) -> None: """ :keyword feature: The feature to segment the data on. :paramtype feature: str :keyword values: Filters for only the specified values of the given segmented feature. :paramtype values: list[str] """ - super(MonitoringDataSegment, self).__init__(**kwargs) + super().__init__(**kwargs) self.feature = feature self.values = values -class MonitoringTarget(msrest.serialization.Model): +class MonitoringTarget(_serialization.Model): """Monitoring target definition. All required parameters must be populated in order to send to Azure. @@ -22066,45 +21908,45 @@ class MonitoringTarget(msrest.serialization.Model): :vartype deployment_id: str :ivar model_id: The ARM resource ID of either the model targeted by this monitor. :vartype model_id: str - :ivar task_type: Required. [Required] The machine learning task type of the model. Possible - values include: "Classification", "Regression", "QuestionAnswering". + :ivar task_type: [Required] The machine learning task type of the model. Required. Known values + are: "Classification", "Regression", and "QuestionAnswering". :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.ModelTaskType """ _validation = { - 'task_type': {'required': True}, + "task_type": {"required": True}, } _attribute_map = { - 'deployment_id': {'key': 'deploymentId', 'type': 'str'}, - 'model_id': {'key': 'modelId', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, + "deployment_id": {"key": "deploymentId", "type": "str"}, + "model_id": {"key": "modelId", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, } def __init__( self, *, - task_type: Union[str, "ModelTaskType"], + task_type: Union[str, "_models.ModelTaskType"], deployment_id: Optional[str] = None, model_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword deployment_id: The ARM resource ID of either the deployment targeted by this monitor. :paramtype deployment_id: str :keyword model_id: The ARM resource ID of either the model targeted by this monitor. :paramtype model_id: str - :keyword task_type: Required. [Required] The machine learning task type of the model. Possible - values include: "Classification", "Regression", "QuestionAnswering". + :keyword task_type: [Required] The machine learning task type of the model. Required. Known + values are: "Classification", "Regression", and "QuestionAnswering". :paramtype task_type: str or ~azure.mgmt.machinelearningservices.models.ModelTaskType """ - super(MonitoringTarget, self).__init__(**kwargs) + super().__init__(**kwargs) self.deployment_id = deployment_id self.model_id = model_id self.task_type = task_type -class MonitoringThreshold(msrest.serialization.Model): +class MonitoringThreshold(_serialization.Model): """MonitoringThreshold. :ivar value: The threshold value. If null, the set default is dependent on the metric type. 
@@ -22112,24 +21954,19 @@ class MonitoringThreshold(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': 'float'}, + "value": {"key": "value", "type": "float"}, } - def __init__( - self, - *, - value: Optional[float] = None, - **kwargs - ): + def __init__(self, *, value: Optional[float] = None, **kwargs: Any) -> None: """ :keyword value: The threshold value. If null, the set default is dependent on the metric type. :paramtype value: float """ - super(MonitoringThreshold, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value -class MonitoringWorkspaceConnection(msrest.serialization.Model): +class MonitoringWorkspaceConnection(_serialization.Model): """Monitoring workspace connection definition. :ivar environment_variables: The properties of a workspace service connection to store as @@ -22143,8 +21980,8 @@ class MonitoringWorkspaceConnection(msrest.serialization.Model): """ _attribute_map = { - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'secrets': {'key': 'secrets', 'type': '{str}'}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "secrets": {"key": "secrets", "type": "{str}"}, } def __init__( @@ -22152,8 +21989,8 @@ def __init__( *, environment_variables: Optional[Dict[str, str]] = None, secrets: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword environment_variables: The properties of a workspace service connection to store as environment variables in the submitted jobs. @@ -22164,7 +22001,7 @@ def __init__( Key is workspace connection property path, name is secret key. :paramtype secrets: dict[str, str] """ - super(MonitoringWorkspaceConnection, self).__init__(**kwargs) + super().__init__(**kwargs) self.environment_variables = environment_variables self.secrets = secrets @@ -22174,53 +22011,53 @@ class MonitorServerlessSparkCompute(MonitorComputeConfigurationBase): All required parameters must be populated in order to send to Azure. - :ivar compute_type: Required. [Required] Specifies the type of signal to monitor.Constant - filled by server. Possible values include: "ServerlessSpark". + :ivar compute_type: [Required] Specifies the type of signal to monitor. Required. + "ServerlessSpark" :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.MonitorComputeType - :ivar compute_identity: Required. [Required] The identity scheme leveraged to by the spark jobs - running on serverless Spark. + :ivar compute_identity: [Required] The identity scheme leveraged to by the spark jobs running + on serverless Spark. Required. :vartype compute_identity: ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityBase - :ivar instance_type: Required. [Required] The instance type running the Spark job. + :ivar instance_type: [Required] The instance type running the Spark job. Required. :vartype instance_type: str - :ivar runtime_version: Required. [Required] The Spark runtime version. + :ivar runtime_version: [Required] The Spark runtime version. Required. 
     :vartype runtime_version: str
     """

     _validation = {
-        'compute_type': {'required': True},
-        'compute_identity': {'required': True},
-        'instance_type': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'},
-        'runtime_version': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'},
+        "compute_type": {"required": True},
+        "compute_identity": {"required": True},
+        "instance_type": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"},
+        "runtime_version": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"},
     }

     _attribute_map = {
-        'compute_type': {'key': 'computeType', 'type': 'str'},
-        'compute_identity': {'key': 'computeIdentity', 'type': 'MonitorComputeIdentityBase'},
-        'instance_type': {'key': 'instanceType', 'type': 'str'},
-        'runtime_version': {'key': 'runtimeVersion', 'type': 'str'},
+        "compute_type": {"key": "computeType", "type": "str"},
+        "compute_identity": {"key": "computeIdentity", "type": "MonitorComputeIdentityBase"},
+        "instance_type": {"key": "instanceType", "type": "str"},
+        "runtime_version": {"key": "runtimeVersion", "type": "str"},
     }

     def __init__(
         self,
         *,
-        compute_identity: "MonitorComputeIdentityBase",
+        compute_identity: "_models.MonitorComputeIdentityBase",
         instance_type: str,
         runtime_version: str,
-        **kwargs
-    ):
+        **kwargs: Any
+    ) -> None:
         """
-        :keyword compute_identity: Required. [Required] The identity scheme leveraged to by the spark
-         jobs running on serverless Spark.
+        :keyword compute_identity: [Required] The identity scheme leveraged to by the spark jobs
+         running on serverless Spark. Required.
         :paramtype compute_identity: ~azure.mgmt.machinelearningservices.models.MonitorComputeIdentityBase
-        :keyword instance_type: Required. [Required] The instance type running the Spark job.
+        :keyword instance_type: [Required] The instance type running the Spark job. Required.
         :paramtype instance_type: str
-        :keyword runtime_version: Required. [Required] The Spark runtime version.
+        :keyword runtime_version: [Required] The Spark runtime version. Required.
         :paramtype runtime_version: str
         """
-        super(MonitorServerlessSparkCompute, self).__init__(**kwargs)
-        self.compute_type = 'ServerlessSpark'  # type: str
+        super().__init__(**kwargs)
+        self.compute_type: str = "ServerlessSpark"
         self.compute_identity = compute_identity
         self.instance_type = instance_type
         self.runtime_version = runtime_version
@@ -22231,39 +22068,33 @@ class Mpi(DistributionConfiguration):

     All required parameters must be populated in order to send to Azure.

-    :ivar distribution_type: Required. [Required] Specifies the type of distribution
-     framework.Constant filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi",
-     "Ray".
+    :ivar distribution_type: [Required] Specifies the type of distribution framework. Required.
+     Known values are: "PyTorch", "TensorFlow", "Mpi", and "Ray".
     :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType
     :ivar process_count_per_instance: Number of processes per MPI node.
:vartype process_count_per_instance: int """ _validation = { - 'distribution_type': {'required': True}, + "distribution_type": {"required": True}, } _attribute_map = { - 'distribution_type': {'key': 'distributionType', 'type': 'str'}, - 'process_count_per_instance': {'key': 'processCountPerInstance', 'type': 'int'}, + "distribution_type": {"key": "distributionType", "type": "str"}, + "process_count_per_instance": {"key": "processCountPerInstance", "type": "int"}, } - def __init__( - self, - *, - process_count_per_instance: Optional[int] = None, - **kwargs - ): + def __init__(self, *, process_count_per_instance: Optional[int] = None, **kwargs: Any) -> None: """ :keyword process_count_per_instance: Number of processes per MPI node. :paramtype process_count_per_instance: int """ - super(Mpi, self).__init__(**kwargs) - self.distribution_type = 'Mpi' # type: str + super().__init__(**kwargs) + self.distribution_type: str = "Mpi" self.process_count_per_instance = process_count_per_instance -class NlpFixedParameters(msrest.serialization.Model): +class NlpFixedParameters(_serialization.Model): """Fixed training parameters that won't be swept over during AutoML NLP training. :ivar gradient_accumulation_steps: Number of steps to accumulate gradients over before running @@ -22272,8 +22103,8 @@ class NlpFixedParameters(msrest.serialization.Model): :ivar learning_rate: The learning rate for the training procedure. :vartype learning_rate: float :ivar learning_rate_scheduler: The type of learning rate schedule to use during the training - procedure. Possible values include: "None", "Linear", "Cosine", "CosineWithRestarts", - "Polynomial", "Constant", "ConstantWithWarmup". + procedure. Known values are: "None", "Linear", "Cosine", "CosineWithRestarts", "Polynomial", + "Constant", and "ConstantWithWarmup". :vartype learning_rate_scheduler: str or ~azure.mgmt.machinelearningservices.models.NlpLearningRateScheduler :ivar model_name: The name of the model to train. 
@@ -22291,15 +22122,15 @@ class NlpFixedParameters(msrest.serialization.Model): """ _attribute_map = { - 'gradient_accumulation_steps': {'key': 'gradientAccumulationSteps', 'type': 'int'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'int'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'int'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'int'}, - 'warmup_ratio': {'key': 'warmupRatio', 'type': 'float'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'float'}, + "gradient_accumulation_steps": {"key": "gradientAccumulationSteps", "type": "int"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "int"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "int"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "int"}, + "warmup_ratio": {"key": "warmupRatio", "type": "float"}, + "weight_decay": {"key": "weightDecay", "type": "float"}, } def __init__( @@ -22307,15 +22138,15 @@ def __init__( *, gradient_accumulation_steps: Optional[int] = None, learning_rate: Optional[float] = None, - learning_rate_scheduler: Optional[Union[str, "NlpLearningRateScheduler"]] = None, + learning_rate_scheduler: Optional[Union[str, "_models.NlpLearningRateScheduler"]] = None, model_name: Optional[str] = None, number_of_epochs: Optional[int] = None, training_batch_size: Optional[int] = None, validation_batch_size: Optional[int] = None, warmup_ratio: Optional[float] = None, weight_decay: Optional[float] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword gradient_accumulation_steps: Number of steps to accumulate gradients over before running a backward pass. @@ -22323,8 +22154,8 @@ def __init__( :keyword learning_rate: The learning rate for the training procedure. :paramtype learning_rate: float :keyword learning_rate_scheduler: The type of learning rate schedule to use during the training - procedure. Possible values include: "None", "Linear", "Cosine", "CosineWithRestarts", - "Polynomial", "Constant", "ConstantWithWarmup". + procedure. Known values are: "None", "Linear", "Cosine", "CosineWithRestarts", "Polynomial", + "Constant", and "ConstantWithWarmup". :paramtype learning_rate_scheduler: str or ~azure.mgmt.machinelearningservices.models.NlpLearningRateScheduler :keyword model_name: The name of the model to train. @@ -22340,7 +22171,7 @@ def __init__( :keyword weight_decay: The weight decay for the training procedure. :paramtype weight_decay: float """ - super(NlpFixedParameters, self).__init__(**kwargs) + super().__init__(**kwargs) self.gradient_accumulation_steps = gradient_accumulation_steps self.learning_rate = learning_rate self.learning_rate_scheduler = learning_rate_scheduler @@ -22352,7 +22183,7 @@ def __init__( self.weight_decay = weight_decay -class NlpParameterSubspace(msrest.serialization.Model): +class NlpParameterSubspace(_serialization.Model): """Stringified search spaces for each parameter. See below examples. 
:ivar gradient_accumulation_steps: Number of steps to accumulate gradients over before running @@ -22378,15 +22209,15 @@ class NlpParameterSubspace(msrest.serialization.Model): """ _attribute_map = { - 'gradient_accumulation_steps': {'key': 'gradientAccumulationSteps', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'learning_rate_scheduler': {'key': 'learningRateScheduler', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'number_of_epochs': {'key': 'numberOfEpochs', 'type': 'str'}, - 'training_batch_size': {'key': 'trainingBatchSize', 'type': 'str'}, - 'validation_batch_size': {'key': 'validationBatchSize', 'type': 'str'}, - 'warmup_ratio': {'key': 'warmupRatio', 'type': 'str'}, - 'weight_decay': {'key': 'weightDecay', 'type': 'str'}, + "gradient_accumulation_steps": {"key": "gradientAccumulationSteps", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "learning_rate_scheduler": {"key": "learningRateScheduler", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "number_of_epochs": {"key": "numberOfEpochs", "type": "str"}, + "training_batch_size": {"key": "trainingBatchSize", "type": "str"}, + "validation_batch_size": {"key": "validationBatchSize", "type": "str"}, + "warmup_ratio": {"key": "warmupRatio", "type": "str"}, + "weight_decay": {"key": "weightDecay", "type": "str"}, } def __init__( @@ -22401,8 +22232,8 @@ def __init__( validation_batch_size: Optional[str] = None, warmup_ratio: Optional[str] = None, weight_decay: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword gradient_accumulation_steps: Number of steps to accumulate gradients over before running a backward pass. @@ -22425,7 +22256,7 @@ def __init__( :keyword weight_decay: The weight decay for the training procedure. :paramtype weight_decay: str """ - super(NlpParameterSubspace, self).__init__(**kwargs) + super().__init__(**kwargs) self.gradient_accumulation_steps = gradient_accumulation_steps self.learning_rate = learning_rate self.learning_rate_scheduler = learning_rate_scheduler @@ -22437,51 +22268,51 @@ def __init__( self.weight_decay = weight_decay -class NlpSweepSettings(msrest.serialization.Model): +class NlpSweepSettings(_serialization.Model): """Model sweeping and hyperparameter tuning related settings. All required parameters must be populated in order to send to Azure. :ivar early_termination: Type of early termination policy for the sweeping job. :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :ivar sampling_algorithm: Required. [Required] Type of sampling algorithm. Possible values - include: "Grid", "Random", "Bayesian". + :ivar sampling_algorithm: [Required] Type of sampling algorithm. Required. Known values are: + "Grid", "Random", and "Bayesian". 
:vartype sampling_algorithm: str or ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ _validation = { - 'sampling_algorithm': {'required': True}, + "sampling_algorithm": {"required": True}, } _attribute_map = { - 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'}, - 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'}, + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "str"}, } def __init__( self, *, - sampling_algorithm: Union[str, "SamplingAlgorithmType"], - early_termination: Optional["EarlyTerminationPolicy"] = None, - **kwargs - ): + sampling_algorithm: Union[str, "_models.SamplingAlgorithmType"], + early_termination: Optional["_models.EarlyTerminationPolicy"] = None, + **kwargs: Any + ) -> None: """ :keyword early_termination: Type of early termination policy for the sweeping job. :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :keyword sampling_algorithm: Required. [Required] Type of sampling algorithm. Possible values - include: "Grid", "Random", "Bayesian". + :keyword sampling_algorithm: [Required] Type of sampling algorithm. Required. Known values are: + "Grid", "Random", and "Bayesian". :paramtype sampling_algorithm: str or ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ - super(NlpSweepSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.early_termination = early_termination self.sampling_algorithm = sampling_algorithm -class NlpVertical(msrest.serialization.Model): +class NlpVertical(_serialization.Model): """Abstract class for NLP related AutoML tasks. -NLP - Natural Language Processing. + NLP - Natural Language Processing. :ivar featurization_settings: Featurization inputs needed for AutoML job. 
:vartype featurization_settings: @@ -22501,25 +22332,25 @@ class NlpVertical(msrest.serialization.Model): """ _attribute_map = { - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'NlpFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, - 'search_space': {'key': 'searchSpace', 'type': '[NlpParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'NlpSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, + "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, } def __init__( self, *, - featurization_settings: Optional["NlpVerticalFeaturizationSettings"] = None, - fixed_parameters: Optional["NlpFixedParameters"] = None, - limit_settings: Optional["NlpVerticalLimitSettings"] = None, - search_space: Optional[List["NlpParameterSubspace"]] = None, - sweep_settings: Optional["NlpSweepSettings"] = None, - validation_data: Optional["MLTableJobInput"] = None, - **kwargs - ): + featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.NlpFixedParameters"] = None, + limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + search_space: Optional[List["_models.NlpParameterSubspace"]] = None, + sweep_settings: Optional["_models.NlpSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + **kwargs: Any + ) -> None: """ :keyword featurization_settings: Featurization inputs needed for AutoML job. :paramtype featurization_settings: @@ -22537,7 +22368,7 @@ def __init__( :keyword validation_data: Validation data inputs. :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ - super(NlpVertical, self).__init__(**kwargs) + super().__init__(**kwargs) self.featurization_settings = featurization_settings self.fixed_parameters = fixed_parameters self.limit_settings = limit_settings @@ -22554,23 +22385,18 @@ class NlpVerticalFeaturizationSettings(FeaturizationSettings): """ _attribute_map = { - 'dataset_language': {'key': 'datasetLanguage', 'type': 'str'}, + "dataset_language": {"key": "datasetLanguage", "type": "str"}, } - def __init__( - self, - *, - dataset_language: Optional[str] = None, - **kwargs - ): + def __init__(self, *, dataset_language: Optional[str] = None, **kwargs: Any) -> None: """ :keyword dataset_language: Dataset language, useful for the text data. :paramtype dataset_language: str """ - super(NlpVerticalFeaturizationSettings, self).__init__(dataset_language=dataset_language, **kwargs) + super().__init__(dataset_language=dataset_language, **kwargs) -class NlpVerticalLimitSettings(msrest.serialization.Model): +class NlpVerticalLimitSettings(_serialization.Model): """Job execution constraints. :ivar max_concurrent_trials: Maximum Concurrent AutoML iterations. 
@@ -22586,23 +22412,23 @@ class NlpVerticalLimitSettings(msrest.serialization.Model): """ _attribute_map = { - 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, - 'max_nodes': {'key': 'maxNodes', 'type': 'int'}, - 'max_trials': {'key': 'maxTrials', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, - 'trial_timeout': {'key': 'trialTimeout', 'type': 'duration'}, + "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, + "max_nodes": {"key": "maxNodes", "type": "int"}, + "max_trials": {"key": "maxTrials", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, + "trial_timeout": {"key": "trialTimeout", "type": "duration"}, } def __init__( self, *, - max_concurrent_trials: Optional[int] = 1, - max_nodes: Optional[int] = 1, - max_trials: Optional[int] = 1, - timeout: Optional[datetime.timedelta] = "P7D", + max_concurrent_trials: int = 1, + max_nodes: int = 1, + max_trials: int = 1, + timeout: datetime.timedelta = "P7D", trial_timeout: Optional[datetime.timedelta] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword max_concurrent_trials: Maximum Concurrent AutoML iterations. :paramtype max_concurrent_trials: int @@ -22615,7 +22441,7 @@ def __init__( :keyword trial_timeout: Timeout for individual HD trials. :paramtype trial_timeout: ~datetime.timedelta """ - super(NlpVerticalLimitSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.max_concurrent_trials = max_concurrent_trials self.max_nodes = max_nodes self.max_trials = max_trials @@ -22623,7 +22449,7 @@ def __init__( self.trial_timeout = trial_timeout -class NodeStateCounts(msrest.serialization.Model): +class NodeStateCounts(_serialization.Model): """Counts of various compute node states on the amlCompute. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -22643,30 +22469,26 @@ class NodeStateCounts(msrest.serialization.Model): """ _validation = { - 'idle_node_count': {'readonly': True}, - 'running_node_count': {'readonly': True}, - 'preparing_node_count': {'readonly': True}, - 'unusable_node_count': {'readonly': True}, - 'leaving_node_count': {'readonly': True}, - 'preempted_node_count': {'readonly': True}, + "idle_node_count": {"readonly": True}, + "running_node_count": {"readonly": True}, + "preparing_node_count": {"readonly": True}, + "unusable_node_count": {"readonly": True}, + "leaving_node_count": {"readonly": True}, + "preempted_node_count": {"readonly": True}, } _attribute_map = { - 'idle_node_count': {'key': 'idleNodeCount', 'type': 'int'}, - 'running_node_count': {'key': 'runningNodeCount', 'type': 'int'}, - 'preparing_node_count': {'key': 'preparingNodeCount', 'type': 'int'}, - 'unusable_node_count': {'key': 'unusableNodeCount', 'type': 'int'}, - 'leaving_node_count': {'key': 'leavingNodeCount', 'type': 'int'}, - 'preempted_node_count': {'key': 'preemptedNodeCount', 'type': 'int'}, + "idle_node_count": {"key": "idleNodeCount", "type": "int"}, + "running_node_count": {"key": "runningNodeCount", "type": "int"}, + "preparing_node_count": {"key": "preparingNodeCount", "type": "int"}, + "unusable_node_count": {"key": "unusableNodeCount", "type": "int"}, + "leaving_node_count": {"key": "leavingNodeCount", "type": "int"}, + "preempted_node_count": {"key": "preemptedNodeCount", "type": "int"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(NodeStateCounts, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.idle_node_count = None self.running_node_count = None self.preparing_node_count = None @@ -22678,61 +22500,82 @@ def __init__( class NoneAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """NoneAuthTypeWorkspaceConnectionProperties. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", + :ivar category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar created_by_workspace_arm_id: The arm id of the workspace which created this connection. + :vartype created_by_workspace_arm_id: str :ivar expiry_time: :vartype expiry_time: ~datetime.datetime + :ivar is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :vartype is_shared_to_all: bool :ivar metadata: Any object. 
- :vartype metadata: any + :vartype metadata: JSON :ivar target: :vartype target: str """ _validation = { - 'auth_type': {'required': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, } def __init__( self, *, - category: Optional[Union[str, "ConnectionCategory"]] = None, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, expiry_time: Optional[datetime.datetime] = None, - metadata: Optional[Any] = None, + is_shared_to_all: Optional[bool] = None, + metadata: Optional[JSON] = None, target: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", + :keyword category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory :keyword expiry_time: :paramtype expiry_time: ~datetime.datetime + :keyword is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :paramtype is_shared_to_all: bool :keyword metadata: Any object. - :paramtype metadata: any + :paramtype metadata: JSON :keyword target: :paramtype target: str """ - super(NoneAuthTypeWorkspaceConnectionProperties, self).__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) - self.auth_type = 'None' # type: str + super().__init__( + category=category, + expiry_time=expiry_time, + is_shared_to_all=is_shared_to_all, + metadata=metadata, + target=target, + **kwargs + ) + self.auth_type: str = "None" class NoneDatastoreCredentials(DatastoreCredentials): @@ -22740,31 +22583,27 @@ class NoneDatastoreCredentials(DatastoreCredentials): All required parameters must be populated in order to send to Azure. - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". 
:vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType """ _validation = { - 'credentials_type': {'required': True}, + "credentials_type": {"required": True}, } _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(NoneDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'None' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.credentials_type: str = "None" -class NotebookAccessTokenResult(msrest.serialization.Model): +class NotebookAccessTokenResult(_serialization.Model): """NotebookAccessTokenResult. Variables are only populated by the server, and will be ignored when sending a request. @@ -22788,34 +22627,30 @@ class NotebookAccessTokenResult(msrest.serialization.Model): """ _validation = { - 'access_token': {'readonly': True}, - 'expires_in': {'readonly': True}, - 'host_name': {'readonly': True}, - 'notebook_resource_id': {'readonly': True}, - 'public_dns': {'readonly': True}, - 'refresh_token': {'readonly': True}, - 'scope': {'readonly': True}, - 'token_type': {'readonly': True}, + "access_token": {"readonly": True}, + "expires_in": {"readonly": True}, + "host_name": {"readonly": True}, + "notebook_resource_id": {"readonly": True}, + "public_dns": {"readonly": True}, + "refresh_token": {"readonly": True}, + "scope": {"readonly": True}, + "token_type": {"readonly": True}, } _attribute_map = { - 'access_token': {'key': 'accessToken', 'type': 'str'}, - 'expires_in': {'key': 'expiresIn', 'type': 'int'}, - 'host_name': {'key': 'hostName', 'type': 'str'}, - 'notebook_resource_id': {'key': 'notebookResourceId', 'type': 'str'}, - 'public_dns': {'key': 'publicDns', 'type': 'str'}, - 'refresh_token': {'key': 'refreshToken', 'type': 'str'}, - 'scope': {'key': 'scope', 'type': 'str'}, - 'token_type': {'key': 'tokenType', 'type': 'str'}, + "access_token": {"key": "accessToken", "type": "str"}, + "expires_in": {"key": "expiresIn", "type": "int"}, + "host_name": {"key": "hostName", "type": "str"}, + "notebook_resource_id": {"key": "notebookResourceId", "type": "str"}, + "public_dns": {"key": "publicDns", "type": "str"}, + "refresh_token": {"key": "refreshToken", "type": "str"}, + "scope": {"key": "scope", "type": "str"}, + "token_type": {"key": "tokenType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(NotebookAccessTokenResult, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.access_token = None self.expires_in = None self.host_name = None @@ -22826,7 +22661,7 @@ def __init__( self.token_type = None -class NotebookPreparationError(msrest.serialization.Model): +class NotebookPreparationError(_serialization.Model): """NotebookPreparationError. 
:ivar error_message: @@ -22836,29 +22671,25 @@ class NotebookPreparationError(msrest.serialization.Model): """ _attribute_map = { - 'error_message': {'key': 'errorMessage', 'type': 'str'}, - 'status_code': {'key': 'statusCode', 'type': 'int'}, + "error_message": {"key": "errorMessage", "type": "str"}, + "status_code": {"key": "statusCode", "type": "int"}, } def __init__( - self, - *, - error_message: Optional[str] = None, - status_code: Optional[int] = None, - **kwargs - ): + self, *, error_message: Optional[str] = None, status_code: Optional[int] = None, **kwargs: Any + ) -> None: """ :keyword error_message: :paramtype error_message: str :keyword status_code: :paramtype status_code: int """ - super(NotebookPreparationError, self).__init__(**kwargs) + super().__init__(**kwargs) self.error_message = error_message self.status_code = status_code -class NotebookResourceInfo(msrest.serialization.Model): +class NotebookResourceInfo(_serialization.Model): """NotebookResourceInfo. :ivar fqdn: @@ -22873,10 +22704,10 @@ class NotebookResourceInfo(msrest.serialization.Model): """ _attribute_map = { - 'fqdn': {'key': 'fqdn', 'type': 'str'}, - 'is_private_link_enabled': {'key': 'isPrivateLinkEnabled', 'type': 'bool'}, - 'notebook_preparation_error': {'key': 'notebookPreparationError', 'type': 'NotebookPreparationError'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, + "fqdn": {"key": "fqdn", "type": "str"}, + "is_private_link_enabled": {"key": "isPrivateLinkEnabled", "type": "bool"}, + "notebook_preparation_error": {"key": "notebookPreparationError", "type": "NotebookPreparationError"}, + "resource_id": {"key": "resourceId", "type": "str"}, } def __init__( @@ -22884,10 +22715,10 @@ def __init__( *, fqdn: Optional[str] = None, is_private_link_enabled: Optional[bool] = None, - notebook_preparation_error: Optional["NotebookPreparationError"] = None, + notebook_preparation_error: Optional["_models.NotebookPreparationError"] = None, resource_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword fqdn: :paramtype fqdn: str @@ -22899,14 +22730,14 @@ def __init__( :keyword resource_id: the data plane resourceId that used to initialize notebook component. :paramtype resource_id: str """ - super(NotebookResourceInfo, self).__init__(**kwargs) + super().__init__(**kwargs) self.fqdn = fqdn self.is_private_link_enabled = is_private_link_enabled self.notebook_preparation_error = notebook_preparation_error self.resource_id = resource_id -class NotificationSetting(msrest.serialization.Model): +class NotificationSetting(_serialization.Model): """Configuration for notification. :ivar email_on: Send email notification to user on specified notification type. 
@@ -22921,19 +22752,19 @@ class NotificationSetting(msrest.serialization.Model): """ _attribute_map = { - 'email_on': {'key': 'emailOn', 'type': '[str]'}, - 'emails': {'key': 'emails', 'type': '[str]'}, - 'webhooks': {'key': 'webhooks', 'type': '{Webhook}'}, + "email_on": {"key": "emailOn", "type": "[str]"}, + "emails": {"key": "emails", "type": "[str]"}, + "webhooks": {"key": "webhooks", "type": "{Webhook}"}, } def __init__( self, *, - email_on: Optional[List[Union[str, "EmailNotificationEnableType"]]] = None, + email_on: Optional[List[Union[str, "_models.EmailNotificationEnableType"]]] = None, emails: Optional[List[str]] = None, - webhooks: Optional[Dict[str, "Webhook"]] = None, - **kwargs - ): + webhooks: Optional[Dict[str, "_models.Webhook"]] = None, + **kwargs: Any + ) -> None: """ :keyword email_on: Send email notification to user on specified notification type. :paramtype email_on: list[str or @@ -22945,7 +22776,7 @@ def __init__( webhook. :paramtype webhooks: dict[str, ~azure.mgmt.machinelearningservices.models.Webhook] """ - super(NotificationSetting, self).__init__(**kwargs) + super().__init__(**kwargs) self.email_on = email_on self.emails = emails self.webhooks = webhooks @@ -22956,47 +22787,47 @@ class NumericalDataDriftMetricThreshold(DataDriftMetricThresholdBase): All required parameters must be populated in order to send to Azure. - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType :ivar threshold: The threshold value. If null, a default value will be set depending on the selected metric. :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. [Required] The numerical data drift metric to calculate. Possible - values include: "JensenShannonDistance", "PopulationStabilityIndex", - "NormalizedWassersteinDistance", "TwoSampleKolmogorovSmirnovTest". + :ivar metric: [Required] The numerical data drift metric to calculate. Required. Known values + are: "JensenShannonDistance", "PopulationStabilityIndex", "NormalizedWassersteinDistance", and + "TwoSampleKolmogorovSmirnovTest". :vartype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalDataDriftMetric """ _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, + "data_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } def __init__( self, *, - metric: Union[str, "NumericalDataDriftMetric"], - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + metric: Union[str, "_models.NumericalDataDriftMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. 
[Required] The numerical data drift metric to calculate. Possible - values include: "JensenShannonDistance", "PopulationStabilityIndex", - "NormalizedWassersteinDistance", "TwoSampleKolmogorovSmirnovTest". + :keyword metric: [Required] The numerical data drift metric to calculate. Required. Known + values are: "JensenShannonDistance", "PopulationStabilityIndex", + "NormalizedWassersteinDistance", and "TwoSampleKolmogorovSmirnovTest". :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalDataDriftMetric """ - super(NumericalDataDriftMetricThreshold, self).__init__(threshold=threshold, **kwargs) - self.data_type = 'Numerical' # type: str + super().__init__(threshold=threshold, **kwargs) + self.data_type: str = "Numerical" self.metric = metric @@ -23005,45 +22836,45 @@ class NumericalDataQualityMetricThreshold(DataQualityMetricThresholdBase): All required parameters must be populated in order to send to Azure. - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType :ivar threshold: The threshold value. If null, a default value will be set depending on the selected metric. :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. [Required] The numerical data quality metric to calculate. Possible - values include: "NullValueRate", "DataTypeErrorRate", "OutOfBoundsRate". + :ivar metric: [Required] The numerical data quality metric to calculate. Required. Known values + are: "NullValueRate", "DataTypeErrorRate", and "OutOfBoundsRate". :vartype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalDataQualityMetric """ _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, + "data_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } def __init__( self, *, - metric: Union[str, "NumericalDataQualityMetric"], - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + metric: Union[str, "_models.NumericalDataQualityMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. [Required] The numerical data quality metric to calculate. Possible - values include: "NullValueRate", "DataTypeErrorRate", "OutOfBoundsRate". + :keyword metric: [Required] The numerical data quality metric to calculate. Required. Known + values are: "NullValueRate", "DataTypeErrorRate", and "OutOfBoundsRate". 
:paramtype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalDataQualityMetric """ - super(NumericalDataQualityMetricThreshold, self).__init__(threshold=threshold, **kwargs) - self.data_type = 'Numerical' # type: str + super().__init__(threshold=threshold, **kwargs) + self.data_type: str = "Numerical" self.metric = metric @@ -23052,94 +22883,88 @@ class NumericalPredictionDriftMetricThreshold(PredictionDriftMetricThresholdBase All required parameters must be populated in order to send to Azure. - :ivar data_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Numerical", "Categorical". + :ivar data_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Numerical" and "Categorical". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureDataType :ivar threshold: The threshold value. If null, a default value will be set depending on the selected metric. :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. [Required] The numerical prediction drift metric to calculate. Possible - values include: "JensenShannonDistance", "PopulationStabilityIndex", - "NormalizedWassersteinDistance", "TwoSampleKolmogorovSmirnovTest". + :ivar metric: [Required] The numerical prediction drift metric to calculate. Required. Known + values are: "JensenShannonDistance", "PopulationStabilityIndex", + "NormalizedWassersteinDistance", and "TwoSampleKolmogorovSmirnovTest". :vartype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalPredictionDriftMetric """ _validation = { - 'data_type': {'required': True}, - 'metric': {'required': True}, + "data_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "data_type": {"key": "dataType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } def __init__( self, *, - metric: Union[str, "NumericalPredictionDriftMetric"], - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + metric: Union[str, "_models.NumericalPredictionDriftMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. [Required] The numerical prediction drift metric to calculate. - Possible values include: "JensenShannonDistance", "PopulationStabilityIndex", - "NormalizedWassersteinDistance", "TwoSampleKolmogorovSmirnovTest". + :keyword metric: [Required] The numerical prediction drift metric to calculate. Required. Known + values are: "JensenShannonDistance", "PopulationStabilityIndex", + "NormalizedWassersteinDistance", and "TwoSampleKolmogorovSmirnovTest". 
:paramtype metric: str or ~azure.mgmt.machinelearningservices.models.NumericalPredictionDriftMetric """ - super(NumericalPredictionDriftMetricThreshold, self).__init__(threshold=threshold, **kwargs) - self.data_type = 'Numerical' # type: str + super().__init__(threshold=threshold, **kwargs) + self.data_type: str = "Numerical" self.metric = metric -class Objective(msrest.serialization.Model): +class Objective(_serialization.Model): """Optimization objective. All required parameters must be populated in order to send to Azure. - :ivar goal: Required. [Required] Defines supported metric goals for hyperparameter tuning. - Possible values include: "Minimize", "Maximize". + :ivar goal: [Required] Defines supported metric goals for hyperparameter tuning. Required. + Known values are: "Minimize" and "Maximize". :vartype goal: str or ~azure.mgmt.machinelearningservices.models.Goal - :ivar primary_metric: Required. [Required] Name of the metric to optimize. + :ivar primary_metric: [Required] Name of the metric to optimize. Required. :vartype primary_metric: str """ _validation = { - 'goal': {'required': True}, - 'primary_metric': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "goal": {"required": True}, + "primary_metric": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'goal': {'key': 'goal', 'type': 'str'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "goal": {"key": "goal", "type": "str"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } - def __init__( - self, - *, - goal: Union[str, "Goal"], - primary_metric: str, - **kwargs - ): + def __init__(self, *, goal: Union[str, "_models.Goal"], primary_metric: str, **kwargs: Any) -> None: """ - :keyword goal: Required. [Required] Defines supported metric goals for hyperparameter tuning. - Possible values include: "Minimize", "Maximize". + :keyword goal: [Required] Defines supported metric goals for hyperparameter tuning. Required. + Known values are: "Minimize" and "Maximize". :paramtype goal: str or ~azure.mgmt.machinelearningservices.models.Goal - :keyword primary_metric: Required. [Required] Name of the metric to optimize. + :keyword primary_metric: [Required] Name of the metric to optimize. Required. :paramtype primary_metric: str """ - super(Objective, self).__init__(**kwargs) + super().__init__(**kwargs) self.goal = goal self.primary_metric = primary_metric -class OneLakeDatastore(DatastoreProperties): +class OneLakeDatastore(DatastoreProperties): # pylint: disable=too-many-instance-attributes """OneLake (Trident) datastore configuration. Variables are only populated by the server, and will be ignored when sending a request. @@ -23150,94 +22975,100 @@ class OneLakeDatastore(DatastoreProperties): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar credentials: Required. [Required] Account credentials. + :ivar credentials: [Required] Account credentials. Required. :vartype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials - :ivar datastore_type: Required. [Required] Storage type backing the datastore.Constant filled - by server. Possible values include: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", - "AzureFile", "Hdfs", "OneLake". 
+ :ivar datastore_type: [Required] Storage type backing the datastore. Required. Known values + are: "AzureBlob", "AzureDataLakeGen1", "AzureDataLakeGen2", "AzureFile", "Hdfs", and "OneLake". :vartype datastore_type: str or ~azure.mgmt.machinelearningservices.models.DatastoreType :ivar intellectual_property: Intellectual Property details. :vartype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty :ivar is_default: Readonly property to indicate if datastore is the workspace default datastore. :vartype is_default: bool - :ivar artifact: Required. [Required] OneLake artifact backing the datastore. + :ivar artifact: [Required] OneLake artifact backing the datastore. Required. :vartype artifact: ~azure.mgmt.machinelearningservices.models.OneLakeArtifact :ivar endpoint: OneLake endpoint to use for the datastore. :vartype endpoint: str - :ivar one_lake_workspace_name: Required. [Required] OneLake workspace name. + :ivar one_lake_workspace_name: [Required] OneLake workspace name. Required. :vartype one_lake_workspace_name: str :ivar service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". :vartype service_data_access_auth_identity: str or ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ _validation = { - 'credentials': {'required': True}, - 'datastore_type': {'required': True}, - 'is_default': {'readonly': True}, - 'artifact': {'required': True}, - 'one_lake_workspace_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "credentials": {"required": True}, + "datastore_type": {"required": True}, + "is_default": {"readonly": True}, + "artifact": {"required": True}, + "one_lake_workspace_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'credentials': {'key': 'credentials', 'type': 'DatastoreCredentials'}, - 'datastore_type': {'key': 'datastoreType', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'is_default': {'key': 'isDefault', 'type': 'bool'}, - 'artifact': {'key': 'artifact', 'type': 'OneLakeArtifact'}, - 'endpoint': {'key': 'endpoint', 'type': 'str'}, - 'one_lake_workspace_name': {'key': 'oneLakeWorkspaceName', 'type': 'str'}, - 'service_data_access_auth_identity': {'key': 'serviceDataAccessAuthIdentity', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "credentials": {"key": "credentials", "type": "DatastoreCredentials"}, + "datastore_type": {"key": "datastoreType", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "is_default": {"key": "isDefault", "type": "bool"}, + "artifact": {"key": "artifact", "type": "OneLakeArtifact"}, + "endpoint": {"key": "endpoint", "type": "str"}, + "one_lake_workspace_name": {"key": "oneLakeWorkspaceName", "type": "str"}, + "service_data_access_auth_identity": {"key": "serviceDataAccessAuthIdentity", "type": "str"}, } def 
__init__( self, *, - credentials: "DatastoreCredentials", - artifact: "OneLakeArtifact", + credentials: "_models.DatastoreCredentials", + artifact: "_models.OneLakeArtifact", one_lake_workspace_name: str, description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - intellectual_property: Optional["IntellectualProperty"] = None, + intellectual_property: Optional["_models.IntellectualProperty"] = None, endpoint: Optional[str] = None, - service_data_access_auth_identity: Optional[Union[str, "ServiceDataAccessAuthIdentity"]] = None, - **kwargs - ): + service_data_access_auth_identity: Optional[Union[str, "_models.ServiceDataAccessAuthIdentity"]] = None, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword credentials: Required. [Required] Account credentials. + :keyword credentials: [Required] Account credentials. Required. :paramtype credentials: ~azure.mgmt.machinelearningservices.models.DatastoreCredentials :keyword intellectual_property: Intellectual Property details. :paramtype intellectual_property: ~azure.mgmt.machinelearningservices.models.IntellectualProperty - :keyword artifact: Required. [Required] OneLake artifact backing the datastore. + :keyword artifact: [Required] OneLake artifact backing the datastore. Required. :paramtype artifact: ~azure.mgmt.machinelearningservices.models.OneLakeArtifact :keyword endpoint: OneLake endpoint to use for the datastore. :paramtype endpoint: str - :keyword one_lake_workspace_name: Required. [Required] OneLake workspace name. + :keyword one_lake_workspace_name: [Required] OneLake workspace name. Required. :paramtype one_lake_workspace_name: str :keyword service_data_access_auth_identity: Indicates which identity to use to authenticate - service data access to customer's storage. Possible values include: "None", - "WorkspaceSystemAssignedIdentity", "WorkspaceUserAssignedIdentity". + service data access to customer's storage. Known values are: "None", + "WorkspaceSystemAssignedIdentity", and "WorkspaceUserAssignedIdentity". :paramtype service_data_access_auth_identity: str or ~azure.mgmt.machinelearningservices.models.ServiceDataAccessAuthIdentity """ - super(OneLakeDatastore, self).__init__(description=description, properties=properties, tags=tags, credentials=credentials, intellectual_property=intellectual_property, **kwargs) - self.datastore_type = 'OneLake' # type: str + super().__init__( + description=description, + properties=properties, + tags=tags, + credentials=credentials, + intellectual_property=intellectual_property, + **kwargs + ) + self.datastore_type: str = "OneLake" self.artifact = artifact self.endpoint = endpoint self.one_lake_workspace_name = one_lake_workspace_name @@ -23262,77 +23093,77 @@ class OnlineDeployment(TrackedResource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. 
+ :ivar location: The geo-location where the resource lives. Required. :vartype location: str :ivar identity: Managed service identity (system assigned and/or user assigned identities). :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type. :vartype kind: str - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.OnlineDeploymentProperties :ivar sku: Sku details required for ARM contract for Autoscaling. :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'OnlineDeploymentProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "OnlineDeploymentProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } def __init__( self, *, location: str, - properties: "OnlineDeploymentProperties", + properties: "_models.OnlineDeploymentProperties", tags: Optional[Dict[str, str]] = None, - identity: Optional["ManagedServiceIdentity"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, kind: Optional[str] = None, - sku: Optional["Sku"] = None, - **kwargs - ): + sku: Optional["_models.Sku"] = None, + **kwargs: Any + ) -> None: """ - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. + :keyword location: The geo-location where the resource lives. Required. :paramtype location: str :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type. :paramtype kind: str - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.OnlineDeploymentProperties :keyword sku: Sku details required for ARM contract for Autoscaling. :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ - super(OnlineDeployment, self).__init__(tags=tags, location=location, **kwargs) + super().__init__(tags=tags, location=location, **kwargs) self.identity = identity self.kind = kind self.properties = properties self.sku = sku -class OnlineDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Model): +class OnlineDeploymentTrackedResourceArmPaginatedResult(_serialization.Model): """A paginated list of OnlineDeployment entities. :ivar next_link: The link to the next page of OnlineDeployment objects. If null, there are no @@ -23343,17 +23174,17 @@ class OnlineDeploymentTrackedResourceArmPaginatedResult(msrest.serialization.Mod """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[OnlineDeployment]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[OnlineDeployment]"}, } def __init__( self, *, next_link: Optional[str] = None, - value: Optional[List["OnlineDeployment"]] = None, - **kwargs - ): + value: Optional[List["_models.OnlineDeployment"]] = None, + **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of OnlineDeployment objects. If null, there are no additional pages. @@ -23361,7 +23192,7 @@ def __init__( :keyword value: An array of objects of type OnlineDeployment. :paramtype value: list[~azure.mgmt.machinelearningservices.models.OnlineDeployment] """ - super(OnlineDeploymentTrackedResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value @@ -23384,86 +23215,86 @@ class OnlineEndpoint(TrackedResource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. + :ivar location: The geo-location where the resource lives. Required. :vartype location: str :ivar identity: Managed service identity (system assigned and/or user assigned identities). :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type. :vartype kind: str - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.OnlineEndpointProperties :ivar sku: Sku details required for ARM contract for Autoscaling. 
:vartype sku: ~azure.mgmt.machinelearningservices.models.Sku """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'OnlineEndpointProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "OnlineEndpointProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } def __init__( self, *, location: str, - properties: "OnlineEndpointProperties", + properties: "_models.OnlineEndpointProperties", tags: Optional[Dict[str, str]] = None, - identity: Optional["ManagedServiceIdentity"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, kind: Optional[str] = None, - sku: Optional["Sku"] = None, - **kwargs - ): + sku: Optional["_models.Sku"] = None, + **kwargs: Any + ) -> None: """ - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. + :keyword location: The geo-location where the resource lives. Required. :paramtype location: str :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type. :paramtype kind: str - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.OnlineEndpointProperties :keyword sku: Sku details required for ARM contract for Autoscaling. :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ - super(OnlineEndpoint, self).__init__(tags=tags, location=location, **kwargs) + super().__init__(tags=tags, location=location, **kwargs) self.identity = identity self.kind = kind self.properties = properties self.sku = sku -class OnlineEndpointProperties(EndpointPropertiesBase): +class OnlineEndpointProperties(EndpointPropertiesBase): # pylint: disable=too-many-instance-attributes """Online endpoint configuration. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar auth_mode: Required. 
[Required] Use 'Key' for key based authentication and 'AMLToken' for - Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. - Possible values include: "AMLToken", "Key", "AADToken". + :ivar auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure + Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Required. Known values are: "AMLToken", "Key", and "AADToken". :vartype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode :ivar description: Description of the inference endpoint. :vartype description: str @@ -23483,12 +23314,12 @@ class OnlineEndpointProperties(EndpointPropertiesBase): :ivar mirror_traffic: Percentage of traffic to be mirrored to each deployment without using returned scoring. Traffic values need to sum to utmost 50. :vartype mirror_traffic: dict[str, int] - :ivar provisioning_state: Provisioning state for the endpoint. Possible values include: - "Creating", "Deleting", "Succeeded", "Failed", "Updating", "Canceled". + :ivar provisioning_state: Provisioning state for the endpoint. Known values are: "Creating", + "Deleting", "Succeeded", "Failed", "Updating", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState :ivar public_network_access: Set to "Enabled" for endpoints that should allow public access - when Private Link is enabled. Possible values include: "Enabled", "Disabled". + when Private Link is enabled. Known values are: "Enabled" and "Disabled". :vartype public_network_access: str or ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType :ivar traffic: Percentage of traffic from endpoint to divert to each deployment. Traffic values @@ -23497,43 +23328,43 @@ class OnlineEndpointProperties(EndpointPropertiesBase): """ _validation = { - 'auth_mode': {'required': True}, - 'scoring_uri': {'readonly': True}, - 'swagger_uri': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "auth_mode": {"required": True}, + "scoring_uri": {"readonly": True}, + "swagger_uri": {"readonly": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'auth_mode': {'key': 'authMode', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'keys': {'key': 'keys', 'type': 'EndpointAuthKeys'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'scoring_uri': {'key': 'scoringUri', 'type': 'str'}, - 'swagger_uri': {'key': 'swaggerUri', 'type': 'str'}, - 'compute': {'key': 'compute', 'type': 'str'}, - 'mirror_traffic': {'key': 'mirrorTraffic', 'type': '{int}'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'public_network_access': {'key': 'publicNetworkAccess', 'type': 'str'}, - 'traffic': {'key': 'traffic', 'type': '{int}'}, + "auth_mode": {"key": "authMode", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "keys": {"key": "keys", "type": "EndpointAuthKeys"}, + "properties": {"key": "properties", "type": "{str}"}, + "scoring_uri": {"key": "scoringUri", "type": "str"}, + "swagger_uri": {"key": "swaggerUri", "type": "str"}, + "compute": {"key": "compute", "type": "str"}, + "mirror_traffic": {"key": "mirrorTraffic", "type": "{int}"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "public_network_access": {"key": "publicNetworkAccess", "type": "str"}, + "traffic": {"key": "traffic", "type": "{int}"}, } def __init__( self, *, - auth_mode: Union[str, "EndpointAuthMode"], 
+ auth_mode: Union[str, "_models.EndpointAuthMode"], description: Optional[str] = None, - keys: Optional["EndpointAuthKeys"] = None, + keys: Optional["_models.EndpointAuthKeys"] = None, properties: Optional[Dict[str, str]] = None, compute: Optional[str] = None, mirror_traffic: Optional[Dict[str, int]] = None, - public_network_access: Optional[Union[str, "PublicNetworkAccessType"]] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccessType"]] = None, traffic: Optional[Dict[str, int]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword auth_mode: Required. [Required] Use 'Key' for key based authentication and 'AMLToken' - for Azure Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' - does. Possible values include: "AMLToken", "Key", "AADToken". + :keyword auth_mode: [Required] Use 'Key' for key based authentication and 'AMLToken' for Azure + Machine Learning token-based authentication. 'Key' doesn't expire but 'AMLToken' does. + Required. Known values are: "AMLToken", "Key", and "AADToken". :paramtype auth_mode: str or ~azure.mgmt.machinelearningservices.models.EndpointAuthMode :keyword description: Description of the inference endpoint. :paramtype description: str @@ -23550,14 +23381,14 @@ def __init__( returned scoring. Traffic values need to sum to utmost 50. :paramtype mirror_traffic: dict[str, int] :keyword public_network_access: Set to "Enabled" for endpoints that should allow public access - when Private Link is enabled. Possible values include: "Enabled", "Disabled". + when Private Link is enabled. Known values are: "Enabled" and "Disabled". :paramtype public_network_access: str or ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType :keyword traffic: Percentage of traffic from endpoint to divert to each deployment. Traffic values need to sum to 100. :paramtype traffic: dict[str, int] """ - super(OnlineEndpointProperties, self).__init__(auth_mode=auth_mode, description=description, keys=keys, properties=properties, **kwargs) + super().__init__(auth_mode=auth_mode, description=description, keys=keys, properties=properties, **kwargs) self.compute = compute self.mirror_traffic = mirror_traffic self.provisioning_state = None @@ -23565,7 +23396,7 @@ def __init__( self.traffic = traffic -class OnlineEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model): +class OnlineEndpointTrackedResourceArmPaginatedResult(_serialization.Model): """A paginated list of OnlineEndpoint entities. :ivar next_link: The link to the next page of OnlineEndpoint objects. If null, there are no @@ -23576,17 +23407,13 @@ class OnlineEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[OnlineEndpoint]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[OnlineEndpoint]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["OnlineEndpoint"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.OnlineEndpoint"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of OnlineEndpoint objects. If null, there are no additional pages. @@ -23594,12 +23421,12 @@ def __init__( :keyword value: An array of objects of type OnlineEndpoint. 
:paramtype value: list[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] """ - super(OnlineEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class OnlineInferenceConfiguration(msrest.serialization.Model): +class OnlineInferenceConfiguration(_serialization.Model): """Online inference configuration options. :ivar configurations: Additional configurations. @@ -23616,11 +23443,11 @@ class OnlineInferenceConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'configurations': {'key': 'configurations', 'type': '{str}'}, - 'entry_script': {'key': 'entryScript', 'type': 'str'}, - 'liveness_route': {'key': 'livenessRoute', 'type': 'Route'}, - 'readiness_route': {'key': 'readinessRoute', 'type': 'Route'}, - 'scoring_route': {'key': 'scoringRoute', 'type': 'Route'}, + "configurations": {"key": "configurations", "type": "{str}"}, + "entry_script": {"key": "entryScript", "type": "str"}, + "liveness_route": {"key": "livenessRoute", "type": "Route"}, + "readiness_route": {"key": "readinessRoute", "type": "Route"}, + "scoring_route": {"key": "scoringRoute", "type": "Route"}, } def __init__( @@ -23628,11 +23455,11 @@ def __init__( *, configurations: Optional[Dict[str, str]] = None, entry_script: Optional[str] = None, - liveness_route: Optional["Route"] = None, - readiness_route: Optional["Route"] = None, - scoring_route: Optional["Route"] = None, - **kwargs - ): + liveness_route: Optional["_models.Route"] = None, + readiness_route: Optional["_models.Route"] = None, + scoring_route: Optional["_models.Route"] = None, + **kwargs: Any + ) -> None: """ :keyword configurations: Additional configurations. :paramtype configurations: dict[str, str] @@ -23646,7 +23473,7 @@ def __init__( container. :paramtype scoring_route: ~azure.mgmt.machinelearningservices.models.Route """ - super(OnlineInferenceConfiguration, self).__init__(**kwargs) + super().__init__(**kwargs) self.configurations = configurations self.entry_script = entry_script self.liveness_route = liveness_route @@ -23654,7 +23481,7 @@ def __init__( self.scoring_route = scoring_route -class OnlineRequestSettings(msrest.serialization.Model): +class OnlineRequestSettings(_serialization.Model): """Online deployment scoring requests configuration. :ivar max_concurrent_requests_per_instance: The number of maximum concurrent requests per node @@ -23670,19 +23497,19 @@ class OnlineRequestSettings(msrest.serialization.Model): """ _attribute_map = { - 'max_concurrent_requests_per_instance': {'key': 'maxConcurrentRequestsPerInstance', 'type': 'int'}, - 'max_queue_wait': {'key': 'maxQueueWait', 'type': 'duration'}, - 'request_timeout': {'key': 'requestTimeout', 'type': 'duration'}, + "max_concurrent_requests_per_instance": {"key": "maxConcurrentRequestsPerInstance", "type": "int"}, + "max_queue_wait": {"key": "maxQueueWait", "type": "duration"}, + "request_timeout": {"key": "requestTimeout", "type": "duration"}, } def __init__( self, *, - max_concurrent_requests_per_instance: Optional[int] = 1, - max_queue_wait: Optional[datetime.timedelta] = "PT0.5S", - request_timeout: Optional[datetime.timedelta] = "PT5S", - **kwargs - ): + max_concurrent_requests_per_instance: int = 1, + max_queue_wait: datetime.timedelta = "PT0.5S", + request_timeout: datetime.timedelta = "PT5S", + **kwargs: Any + ) -> None: """ :keyword max_concurrent_requests_per_instance: The number of maximum concurrent requests per node allowed per deployment. Defaults to 1. 
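# --- illustrative usage sketch, not part of the generated patch ---
# Shows how the regenerated OnlineEndpointProperties and OnlineRequestSettings
# models from the hunks above might be constructed by a caller. The models
# import path is an assumption; point it at whichever models module ships with
# this regenerated client.
import datetime
from azure.mgmt.machinelearningservices import models as _models

endpoint_props = _models.OnlineEndpointProperties(
    auth_mode="Key",                      # required; "AMLToken" and "AADToken" are the other known values
    description="Sample online endpoint",
    public_network_access="Enabled",
    traffic={"blue": 100},                # traffic percentages must sum to 100
    mirror_traffic={"green": 10},         # mirrored traffic must sum to at most 50
)

request_settings = _models.OnlineRequestSettings(
    max_concurrent_requests_per_instance=2,
    # the generated defaults are the ISO-8601 duration strings "PT0.5S" / "PT5S";
    # explicit values are passed as datetime.timedelta
    max_queue_wait=datetime.timedelta(milliseconds=500),
    request_timeout=datetime.timedelta(seconds=90),
)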
@@ -23695,65 +23522,138 @@ def __init__( Defaults to 5000ms. :paramtype request_timeout: ~datetime.timedelta """ - super(OnlineRequestSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.max_concurrent_requests_per_instance = max_concurrent_requests_per_instance self.max_queue_wait = max_queue_wait self.request_timeout = request_timeout -class OperationDisplay(msrest.serialization.Model): - """Display name of operation. +class Operation(_serialization.Model): + """Details of a REST API operation, returned from the Resource Provider Operations API. - :ivar description: Gets or sets the description for the operation. - :vartype description: str - :ivar operation: Gets or sets the operation that users can perform. - :vartype operation: str - :ivar provider: Gets or sets the resource provider name: - Microsoft.MachineLearningExperimentation. + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar name: The name of the operation, as per Resource-Based Access Control (RBAC). Examples: + "Microsoft.Compute/virtualMachines/write", "Microsoft.Compute/virtualMachines/capture/action". + :vartype name: str + :ivar is_data_action: Whether the operation applies to data-plane. This is "true" for + data-plane operations and "false" for ARM/control-plane operations. + :vartype is_data_action: bool + :ivar display: Localized display information for this particular operation. + :vartype display: ~azure.mgmt.machinelearningservices.models.OperationDisplay + :ivar origin: The intended executor of the operation; as in Resource Based Access Control + (RBAC) and audit logs UX. Default value is "user,system". Known values are: "user", "system", + and "user,system". + :vartype origin: str or ~azure.mgmt.machinelearningservices.models.Origin + :ivar action_type: Enum. Indicates the action type. "Internal" refers to actions that are for + internal only APIs. "Internal" + :vartype action_type: str or ~azure.mgmt.machinelearningservices.models.ActionType + """ + + _validation = { + "name": {"readonly": True}, + "is_data_action": {"readonly": True}, + "origin": {"readonly": True}, + "action_type": {"readonly": True}, + } + + _attribute_map = { + "name": {"key": "name", "type": "str"}, + "is_data_action": {"key": "isDataAction", "type": "bool"}, + "display": {"key": "display", "type": "OperationDisplay"}, + "origin": {"key": "origin", "type": "str"}, + "action_type": {"key": "actionType", "type": "str"}, + } + + def __init__(self, *, display: Optional["_models.OperationDisplay"] = None, **kwargs: Any) -> None: + """ + :keyword display: Localized display information for this particular operation. + :paramtype display: ~azure.mgmt.machinelearningservices.models.OperationDisplay + """ + super().__init__(**kwargs) + self.name = None + self.is_data_action = None + self.display = display + self.origin = None + self.action_type = None + + +class OperationDisplay(_serialization.Model): + """Localized display information for this particular operation. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar provider: The localized friendly form of the resource provider name, e.g. "Microsoft + Monitoring Insights" or "Microsoft Compute". :vartype provider: str - :ivar resource: Gets or sets the resource on which the operation is performed. + :ivar resource: The localized friendly name of the resource type related to this operation. + E.g. "Virtual Machines" or "Job Schedule Collections". 
:vartype resource: str + :ivar operation: The concise, localized friendly name for the operation; suitable for + dropdowns. E.g. "Create or Update Virtual Machine", "Restart Virtual Machine". + :vartype operation: str + :ivar description: The short, localized friendly description of the operation; suitable for + tool tips and detailed views. + :vartype description: str """ + _validation = { + "provider": {"readonly": True}, + "resource": {"readonly": True}, + "operation": {"readonly": True}, + "description": {"readonly": True}, + } + _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'operation': {'key': 'operation', 'type': 'str'}, - 'provider': {'key': 'provider', 'type': 'str'}, - 'resource': {'key': 'resource', 'type': 'str'}, + "provider": {"key": "provider", "type": "str"}, + "resource": {"key": "resource", "type": "str"}, + "operation": {"key": "operation", "type": "str"}, + "description": {"key": "description", "type": "str"}, } - def __init__( - self, - *, - description: Optional[str] = None, - operation: Optional[str] = None, - provider: Optional[str] = None, - resource: Optional[str] = None, - **kwargs - ): - """ - :keyword description: Gets or sets the description for the operation. - :paramtype description: str - :keyword operation: Gets or sets the operation that users can perform. - :paramtype operation: str - :keyword provider: Gets or sets the resource provider name: - Microsoft.MachineLearningExperimentation. - :paramtype provider: str - :keyword resource: Gets or sets the resource on which the operation is performed. - :paramtype resource: str - """ - super(OperationDisplay, self).__init__(**kwargs) - self.description = description - self.operation = operation - self.provider = provider - self.resource = resource + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.provider = None + self.resource = None + self.operation = None + self.description = None -class OsPatchingStatus(msrest.serialization.Model): +class OperationListResult(_serialization.Model): + """A list of REST API operations supported by an Azure Resource Provider. It contains an URL link + to get the next set of results. + + Variables are only populated by the server, and will be ignored when sending a request. + + :ivar value: List of operations supported by the resource provider. + :vartype value: list[~azure.mgmt.machinelearningservices.models.Operation] + :ivar next_link: URL to get the next set of operation list results (if there are any). + :vartype next_link: str + """ + + _validation = { + "value": {"readonly": True}, + "next_link": {"readonly": True}, + } + + _attribute_map = { + "value": {"key": "value", "type": "[Operation]"}, + "next_link": {"key": "nextLink", "type": "str"}, + } + + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.value = None + self.next_link = None + + +class OsPatchingStatus(_serialization.Model): """Returns metadata about the os patching. - :ivar patch_status: The os patching status. Possible values include: "CompletedWithWarnings", - "Failed", "InProgress", "Succeeded", "Unknown". + :ivar patch_status: The os patching status. Known values are: "CompletedWithWarnings", + "Failed", "InProgress", "Succeeded", and "Unknown". :vartype patch_status: str or ~azure.mgmt.machinelearningservices.models.PatchStatus :ivar latest_patch_time: Time of the latest os patching. 
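# --- illustrative sketch, not part of the generated patch ---
# Operation, OperationDisplay and OperationListResult above are populated by the
# server (every field is read-only), so callers only read them back. Assuming the
# regenerated client exposes the standard generated ``operations.list()`` paging
# method (an assumption, not shown in this hunk), consumption might look like:
def print_supported_operations(client) -> None:
    """Print the RBAC operation names advertised by the resource provider."""
    for op in client.operations.list():
        display = op.display  # OperationDisplay; localized, server-populated
        print(op.name, op.is_data_action, display.operation if display else None)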
:vartype latest_patch_time: str @@ -23765,24 +23665,24 @@ class OsPatchingStatus(msrest.serialization.Model): """ _attribute_map = { - 'patch_status': {'key': 'patchStatus', 'type': 'str'}, - 'latest_patch_time': {'key': 'latestPatchTime', 'type': 'str'}, - 'reboot_pending': {'key': 'rebootPending', 'type': 'bool'}, - 'scheduled_reboot_time': {'key': 'scheduledRebootTime', 'type': 'str'}, + "patch_status": {"key": "patchStatus", "type": "str"}, + "latest_patch_time": {"key": "latestPatchTime", "type": "str"}, + "reboot_pending": {"key": "rebootPending", "type": "bool"}, + "scheduled_reboot_time": {"key": "scheduledRebootTime", "type": "str"}, } def __init__( self, *, - patch_status: Optional[Union[str, "PatchStatus"]] = None, + patch_status: Optional[Union[str, "_models.PatchStatus"]] = None, latest_patch_time: Optional[str] = None, reboot_pending: Optional[bool] = None, scheduled_reboot_time: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword patch_status: The os patching status. Possible values include: - "CompletedWithWarnings", "Failed", "InProgress", "Succeeded", "Unknown". + :keyword patch_status: The os patching status. Known values are: "CompletedWithWarnings", + "Failed", "InProgress", "Succeeded", and "Unknown". :paramtype patch_status: str or ~azure.mgmt.machinelearningservices.models.PatchStatus :keyword latest_patch_time: Time of the latest os patching. :paramtype latest_patch_time: str @@ -23792,7 +23692,7 @@ def __init__( :keyword scheduled_reboot_time: Time of scheduled reboot. :paramtype scheduled_reboot_time: str """ - super(OsPatchingStatus, self).__init__(**kwargs) + super().__init__(**kwargs) self.patch_status = patch_status self.latest_patch_time = latest_patch_time self.reboot_pending = reboot_pending @@ -23800,7 +23700,7 @@ def __init__( class OutboundRuleBasicResource(Resource): - """Outbound Rule Basic Resource for the managed network of a machine learning workspace. + """OutboundRuleBasicResource. Variables are only populated by the server, and will be ignored when sending a request. @@ -23817,43 +23717,38 @@ class OutboundRuleBasicResource(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. Outbound Rule for the managed network of a machine learning - workspace. + :ivar properties: Outbound Rule for the managed network of a machine learning workspace. + Required. 
:vartype properties: ~azure.mgmt.machinelearningservices.models.OutboundRule """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'OutboundRule'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "OutboundRule"}, } - def __init__( - self, - *, - properties: "OutboundRule", - **kwargs - ): + def __init__(self, *, properties: "_models.OutboundRule", **kwargs: Any) -> None: """ - :keyword properties: Required. Outbound Rule for the managed network of a machine learning - workspace. + :keyword properties: Outbound Rule for the managed network of a machine learning workspace. + Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.OutboundRule """ - super(OutboundRuleBasicResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class OutboundRuleListResult(msrest.serialization.Model): +class OutboundRuleListResult(_serialization.Model): """List of outbound rules for the managed network of a machine learning workspace. :ivar next_link: The link to the next page constructed using the continuationToken. If null, @@ -23865,17 +23760,17 @@ class OutboundRuleListResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[OutboundRuleBasicResource]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[OutboundRuleBasicResource]"}, } def __init__( self, *, next_link: Optional[str] = None, - value: Optional[List["OutboundRuleBasicResource"]] = None, - **kwargs - ): + value: Optional[List["_models.OutboundRuleBasicResource"]] = None, + **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page constructed using the continuationToken. If null, there are no additional pages. @@ -23884,7 +23779,7 @@ def __init__( nextLink field should be used to request the next list of machine learning workspaces. :paramtype value: list[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] """ - super(OutboundRuleListResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value @@ -23894,8 +23789,8 @@ class OutputPathAssetReference(AssetReferenceBase): All required parameters must be populated in order to send to Azure. - :ivar reference_type: Required. [Required] Specifies the type of asset reference.Constant - filled by server. Possible values include: "Id", "DataPath", "OutputPath". + :ivar reference_type: [Required] Specifies the type of asset reference. Required. Known values + are: "Id", "DataPath", and "OutputPath". :vartype reference_type: str or ~azure.mgmt.machinelearningservices.models.ReferenceType :ivar job_id: ARM resource ID of the job. 
:vartype job_id: str @@ -23904,67 +23799,61 @@ class OutputPathAssetReference(AssetReferenceBase): """ _validation = { - 'reference_type': {'required': True}, + "reference_type": {"required": True}, } _attribute_map = { - 'reference_type': {'key': 'referenceType', 'type': 'str'}, - 'job_id': {'key': 'jobId', 'type': 'str'}, - 'path': {'key': 'path', 'type': 'str'}, + "reference_type": {"key": "referenceType", "type": "str"}, + "job_id": {"key": "jobId", "type": "str"}, + "path": {"key": "path", "type": "str"}, } - def __init__( - self, - *, - job_id: Optional[str] = None, - path: Optional[str] = None, - **kwargs - ): + def __init__(self, *, job_id: Optional[str] = None, path: Optional[str] = None, **kwargs: Any) -> None: """ :keyword job_id: ARM resource ID of the job. :paramtype job_id: str :keyword path: The path of the file/directory in the job output. :paramtype path: str """ - super(OutputPathAssetReference, self).__init__(**kwargs) - self.reference_type = 'OutputPath' # type: str + super().__init__(**kwargs) + self.reference_type: str = "OutputPath" self.job_id = job_id self.path = path -class PackageInputPathBase(msrest.serialization.Model): +class PackageInputPathBase(_serialization.Model): """PackageInputPathBase. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: PackageInputPathId, PackageInputPathVersion, PackageInputPathUrl. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + PackageInputPathId, PackageInputPathVersion, PackageInputPathUrl All required parameters must be populated in order to send to Azure. - :ivar input_path_type: Required. [Required] Input path type for package inputs.Constant filled - by server. Possible values include: "Url", "PathId", "PathVersion". + :ivar input_path_type: [Required] Input path type for package inputs. Required. Known values + are: "Url", "PathId", and "PathVersion". :vartype input_path_type: str or ~azure.mgmt.machinelearningservices.models.InputPathType """ _validation = { - 'input_path_type': {'required': True}, + "input_path_type": {"required": True}, } _attribute_map = { - 'input_path_type': {'key': 'inputPathType', 'type': 'str'}, + "input_path_type": {"key": "inputPathType", "type": "str"}, } _subtype_map = { - 'input_path_type': {'PathId': 'PackageInputPathId', 'PathVersion': 'PackageInputPathVersion', 'Url': 'PackageInputPathUrl'} + "input_path_type": { + "PathId": "PackageInputPathId", + "PathVersion": "PackageInputPathVersion", + "Url": "PackageInputPathUrl", + } } - def __init__( - self, - **kwargs - ): - """ - """ - super(PackageInputPathBase, self).__init__(**kwargs) - self.input_path_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.input_path_type: Optional[str] = None class PackageInputPathId(PackageInputPathBase): @@ -23972,34 +23861,29 @@ class PackageInputPathId(PackageInputPathBase): All required parameters must be populated in order to send to Azure. - :ivar input_path_type: Required. [Required] Input path type for package inputs.Constant filled - by server. Possible values include: "Url", "PathId", "PathVersion". + :ivar input_path_type: [Required] Input path type for package inputs. Required. Known values + are: "Url", "PathId", and "PathVersion". :vartype input_path_type: str or ~azure.mgmt.machinelearningservices.models.InputPathType :ivar resource_id: Input resource id. 
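# --- illustrative sketch, not part of the generated patch ---
# OutputPathAssetReference above is a polymorphic subtype of AssetReferenceBase;
# the constructor no longer takes the discriminator, which is now pinned inside
# __init__ as reference_type = "OutputPath". Import path and the ARM id value
# are assumptions used only for illustration.
from azure.mgmt.machinelearningservices import models as _models

ref = _models.OutputPathAssetReference(
    job_id="/subscriptions/.../jobs/my-job",   # hypothetical ARM resource id
    path="outputs/model",
)
assert ref.reference_type == "OutputPath"      # set automatically, never passed by the caller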
:vartype resource_id: str """ _validation = { - 'input_path_type': {'required': True}, + "input_path_type": {"required": True}, } _attribute_map = { - 'input_path_type': {'key': 'inputPathType', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, + "input_path_type": {"key": "inputPathType", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, } - def __init__( - self, - *, - resource_id: Optional[str] = None, - **kwargs - ): + def __init__(self, *, resource_id: Optional[str] = None, **kwargs: Any) -> None: """ :keyword resource_id: Input resource id. :paramtype resource_id: str """ - super(PackageInputPathId, self).__init__(**kwargs) - self.input_path_type = 'PathId' # type: str + super().__init__(**kwargs) + self.input_path_type: str = "PathId" self.resource_id = resource_id @@ -24008,34 +23892,29 @@ class PackageInputPathUrl(PackageInputPathBase): All required parameters must be populated in order to send to Azure. - :ivar input_path_type: Required. [Required] Input path type for package inputs.Constant filled - by server. Possible values include: "Url", "PathId", "PathVersion". + :ivar input_path_type: [Required] Input path type for package inputs. Required. Known values + are: "Url", "PathId", and "PathVersion". :vartype input_path_type: str or ~azure.mgmt.machinelearningservices.models.InputPathType :ivar url: Input path url. :vartype url: str """ _validation = { - 'input_path_type': {'required': True}, + "input_path_type": {"required": True}, } _attribute_map = { - 'input_path_type': {'key': 'inputPathType', 'type': 'str'}, - 'url': {'key': 'url', 'type': 'str'}, + "input_path_type": {"key": "inputPathType", "type": "str"}, + "url": {"key": "url", "type": "str"}, } - def __init__( - self, - *, - url: Optional[str] = None, - **kwargs - ): + def __init__(self, *, url: Optional[str] = None, **kwargs: Any) -> None: """ :keyword url: Input path url. :paramtype url: str """ - super(PackageInputPathUrl, self).__init__(**kwargs) - self.input_path_type = 'Url' # type: str + super().__init__(**kwargs) + self.input_path_type: str = "Url" self.url = url @@ -24044,8 +23923,8 @@ class PackageInputPathVersion(PackageInputPathBase): All required parameters must be populated in order to send to Azure. - :ivar input_path_type: Required. [Required] Input path type for package inputs.Constant filled - by server. Possible values include: "Url", "PathId", "PathVersion". + :ivar input_path_type: [Required] Input path type for package inputs. Required. Known values + are: "Url", "PathId", and "PathVersion". :vartype input_path_type: str or ~azure.mgmt.machinelearningservices.models.InputPathType :ivar resource_name: Input resource name. 
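# --- illustrative sketch, not part of the generated patch ---
# PackageInputPathBase._subtype_map above drives polymorphic (de)serialization:
# the inputPathType discriminator selects PackageInputPathId, PackageInputPathUrl
# or PackageInputPathVersion. Constructing each concrete subtype (values are
# hypothetical, import path assumed as in the earlier sketches):
from azure.mgmt.machinelearningservices import models as _models

by_id = _models.PackageInputPathId(resource_id="azureml://.../data/my-data/versions/1")   # "PathId"
by_url = _models.PackageInputPathUrl(url="https://example.blob.core.windows.net/data")    # "Url"
by_version = _models.PackageInputPathVersion(resource_name="my-data", resource_version="1")  # "PathVersion"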
:vartype resource_name: str @@ -24054,35 +23933,31 @@ class PackageInputPathVersion(PackageInputPathBase): """ _validation = { - 'input_path_type': {'required': True}, + "input_path_type": {"required": True}, } _attribute_map = { - 'input_path_type': {'key': 'inputPathType', 'type': 'str'}, - 'resource_name': {'key': 'resourceName', 'type': 'str'}, - 'resource_version': {'key': 'resourceVersion', 'type': 'str'}, + "input_path_type": {"key": "inputPathType", "type": "str"}, + "resource_name": {"key": "resourceName", "type": "str"}, + "resource_version": {"key": "resourceVersion", "type": "str"}, } def __init__( - self, - *, - resource_name: Optional[str] = None, - resource_version: Optional[str] = None, - **kwargs - ): + self, *, resource_name: Optional[str] = None, resource_version: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword resource_name: Input resource name. :paramtype resource_name: str :keyword resource_version: Input resource version. :paramtype resource_version: str """ - super(PackageInputPathVersion, self).__init__(**kwargs) - self.input_path_type = 'PathVersion' # type: str + super().__init__(**kwargs) + self.input_path_type: str = "PathVersion" self.resource_name = resource_name self.resource_version = resource_version -class PackageRequest(msrest.serialization.Model): +class PackageRequest(_serialization.Model): """Model package operation request properties. All required parameters must be populated in order to send to Azure. @@ -24092,7 +23967,7 @@ class PackageRequest(msrest.serialization.Model): ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSource :ivar environment_variables: Collection of environment variables. :vartype environment_variables: dict[str, str] - :ivar inferencing_server: Required. [Required] Inferencing server configurations. + :ivar inferencing_server: [Required] Inferencing server configurations. Required. :vartype inferencing_server: ~azure.mgmt.machinelearningservices.models.InferencingServer :ivar inputs: Collection of inputs. :vartype inputs: list[~azure.mgmt.machinelearningservices.models.ModelPackageInput] @@ -24100,49 +23975,49 @@ class PackageRequest(msrest.serialization.Model): :vartype model_configuration: ~azure.mgmt.machinelearningservices.models.ModelConfiguration :ivar properties: Property dictionary. Properties can be added, removed, and updated. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar target_environment_id: Required. [Required] Arm ID of the target environment to be - created by package operation. + :ivar target_environment_id: [Required] Arm ID of the target environment to be created by + package operation. Required. 
:vartype target_environment_id: str """ _validation = { - 'inferencing_server': {'required': True}, - 'target_environment_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "inferencing_server": {"required": True}, + "target_environment_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'base_environment_source': {'key': 'baseEnvironmentSource', 'type': 'BaseEnvironmentSource'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'inferencing_server': {'key': 'inferencingServer', 'type': 'InferencingServer'}, - 'inputs': {'key': 'inputs', 'type': '[ModelPackageInput]'}, - 'model_configuration': {'key': 'modelConfiguration', 'type': 'ModelConfiguration'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'target_environment_id': {'key': 'targetEnvironmentId', 'type': 'str'}, + "base_environment_source": {"key": "baseEnvironmentSource", "type": "BaseEnvironmentSource"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "inferencing_server": {"key": "inferencingServer", "type": "InferencingServer"}, + "inputs": {"key": "inputs", "type": "[ModelPackageInput]"}, + "model_configuration": {"key": "modelConfiguration", "type": "ModelConfiguration"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "target_environment_id": {"key": "targetEnvironmentId", "type": "str"}, } def __init__( self, *, - inferencing_server: "InferencingServer", + inferencing_server: "_models.InferencingServer", target_environment_id: str, - base_environment_source: Optional["BaseEnvironmentSource"] = None, + base_environment_source: Optional["_models.BaseEnvironmentSource"] = None, environment_variables: Optional[Dict[str, str]] = None, - inputs: Optional[List["ModelPackageInput"]] = None, - model_configuration: Optional["ModelConfiguration"] = None, + inputs: Optional[List["_models.ModelPackageInput"]] = None, + model_configuration: Optional["_models.ModelConfiguration"] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword base_environment_source: Base environment to start with. :paramtype base_environment_source: ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSource :keyword environment_variables: Collection of environment variables. :paramtype environment_variables: dict[str, str] - :keyword inferencing_server: Required. [Required] Inferencing server configurations. + :keyword inferencing_server: [Required] Inferencing server configurations. Required. :paramtype inferencing_server: ~azure.mgmt.machinelearningservices.models.InferencingServer :keyword inputs: Collection of inputs. :paramtype inputs: list[~azure.mgmt.machinelearningservices.models.ModelPackageInput] @@ -24150,13 +24025,13 @@ def __init__( :paramtype model_configuration: ~azure.mgmt.machinelearningservices.models.ModelConfiguration :keyword properties: Property dictionary. Properties can be added, removed, and updated. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword target_environment_id: Required. [Required] Arm ID of the target environment to be - created by package operation. 
+ :keyword target_environment_id: [Required] Arm ID of the target environment to be created by + package operation. Required. :paramtype target_environment_id: str """ - super(PackageRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.base_environment_source = base_environment_source self.environment_variables = environment_variables self.inferencing_server = inferencing_server @@ -24167,7 +24042,7 @@ def __init__( self.target_environment_id = target_environment_id -class PackageResponse(msrest.serialization.Model): +class PackageResponse(_serialization.Model): # pylint: disable=too-many-instance-attributes """Package response returned after async package operation completes successfully. Variables are only populated by the server, and will be ignored when sending a request. @@ -24177,8 +24052,8 @@ class PackageResponse(msrest.serialization.Model): ~azure.mgmt.machinelearningservices.models.BaseEnvironmentSource :ivar build_id: Build id of the image build operation. :vartype build_id: str - :ivar build_state: Build state of the image build operation. Possible values include: - "NotStarted", "Running", "Succeeded", "Failed". + :ivar build_state: Build state of the image build operation. Known values are: "NotStarted", + "Running", "Succeeded", and "Failed". :vartype build_state: str or ~azure.mgmt.machinelearningservices.models.PackageBuildState :ivar environment_variables: Collection of environment variables. :vartype environment_variables: dict[str, str] @@ -24192,47 +24067,43 @@ class PackageResponse(msrest.serialization.Model): :vartype model_configuration: ~azure.mgmt.machinelearningservices.models.ModelConfiguration :ivar properties: Property dictionary. Tags can be added, removed, and updated. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar target_environment_id: Asset ID of the target environment created by package operation. 
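# --- illustrative sketch, not part of the generated patch ---
# PackageRequest above requires an InferencingServer and a non-empty target
# environment id; everything else is optional. The AzureMLOnlineInferencingServer
# subtype used here is an assumption -- substitute whichever InferencingServer
# subclass this regenerated models module actually provides.
from azure.mgmt.machinelearningservices import models as _models

package_request = _models.PackageRequest(
    inferencing_server=_models.AzureMLOnlineInferencingServer(),
    target_environment_id="azureml://.../environments/my-packaged-env/versions/1",  # hypothetical id
    environment_variables={"LOG_LEVEL": "info"},
    tags={"team": "ml-platform"},
)
# PackageResponse, by contrast, is entirely server-populated (every field is
# readonly) and is only ever read back from the long-running package operation.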
:vartype target_environment_id: str """ _validation = { - 'base_environment_source': {'readonly': True}, - 'build_id': {'readonly': True}, - 'build_state': {'readonly': True}, - 'environment_variables': {'readonly': True}, - 'inferencing_server': {'readonly': True}, - 'inputs': {'readonly': True}, - 'log_url': {'readonly': True}, - 'model_configuration': {'readonly': True}, - 'properties': {'readonly': True}, - 'tags': {'readonly': True}, - 'target_environment_id': {'readonly': True}, + "base_environment_source": {"readonly": True}, + "build_id": {"readonly": True}, + "build_state": {"readonly": True}, + "environment_variables": {"readonly": True}, + "inferencing_server": {"readonly": True}, + "inputs": {"readonly": True}, + "log_url": {"readonly": True}, + "model_configuration": {"readonly": True}, + "properties": {"readonly": True}, + "tags": {"readonly": True}, + "target_environment_id": {"readonly": True}, } _attribute_map = { - 'base_environment_source': {'key': 'baseEnvironmentSource', 'type': 'BaseEnvironmentSource'}, - 'build_id': {'key': 'buildId', 'type': 'str'}, - 'build_state': {'key': 'buildState', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'inferencing_server': {'key': 'inferencingServer', 'type': 'InferencingServer'}, - 'inputs': {'key': 'inputs', 'type': '[ModelPackageInput]'}, - 'log_url': {'key': 'logUrl', 'type': 'str'}, - 'model_configuration': {'key': 'modelConfiguration', 'type': 'ModelConfiguration'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'target_environment_id': {'key': 'targetEnvironmentId', 'type': 'str'}, + "base_environment_source": {"key": "baseEnvironmentSource", "type": "BaseEnvironmentSource"}, + "build_id": {"key": "buildId", "type": "str"}, + "build_state": {"key": "buildState", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "inferencing_server": {"key": "inferencingServer", "type": "InferencingServer"}, + "inputs": {"key": "inputs", "type": "[ModelPackageInput]"}, + "log_url": {"key": "logUrl", "type": "str"}, + "model_configuration": {"key": "modelConfiguration", "type": "ModelConfiguration"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "target_environment_id": {"key": "targetEnvironmentId", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(PackageResponse, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.base_environment_source = None self.build_id = None self.build_state = None @@ -24246,7 +24117,7 @@ def __init__( self.target_environment_id = None -class PaginatedComputeResourcesList(msrest.serialization.Model): +class PaginatedComputeResourcesList(_serialization.Model): """Paginated list of Machine Learning compute objects wrapped in ARM resource envelope. :ivar value: An array of Machine Learning compute objects wrapped in ARM resource envelope. 
@@ -24256,29 +24127,25 @@ class PaginatedComputeResourcesList(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[ComputeResource]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[ComputeResource]"}, + "next_link": {"key": "nextLink", "type": "str"}, } def __init__( - self, - *, - value: Optional[List["ComputeResource"]] = None, - next_link: Optional[str] = None, - **kwargs - ): + self, *, value: Optional[List["_models.ComputeResource"]] = None, next_link: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword value: An array of Machine Learning compute objects wrapped in ARM resource envelope. :paramtype value: list[~azure.mgmt.machinelearningservices.models.ComputeResource] :keyword next_link: A continuation link (absolute URI) to the next page of results in the list. :paramtype next_link: str """ - super(PaginatedComputeResourcesList, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.next_link = next_link -class PartialBatchDeployment(msrest.serialization.Model): +class PartialBatchDeployment(_serialization.Model): """Mutable batch inference settings per deployment. :ivar description: Description of the endpoint deployment. @@ -24286,56 +24153,51 @@ class PartialBatchDeployment(msrest.serialization.Model): """ _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, } - def __init__( - self, - *, - description: Optional[str] = None, - **kwargs - ): + def __init__(self, *, description: Optional[str] = None, **kwargs: Any) -> None: """ :keyword description: Description of the endpoint deployment. :paramtype description: str """ - super(PartialBatchDeployment, self).__init__(**kwargs) + super().__init__(**kwargs) self.description = description -class PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties(msrest.serialization.Model): +class PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties(_serialization.Model): """Strictly used in update requests. :ivar properties: Additional attributes of the entity. :vartype properties: ~azure.mgmt.machinelearningservices.models.PartialBatchDeployment - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'PartialBatchDeployment'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + "properties": {"key": "properties", "type": "PartialBatchDeployment"}, + "tags": {"key": "tags", "type": "{str}"}, } def __init__( self, *, - properties: Optional["PartialBatchDeployment"] = None, + properties: Optional["_models.PartialBatchDeployment"] = None, tags: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword properties: Additional attributes of the entity. :paramtype properties: ~azure.mgmt.machinelearningservices.models.PartialBatchDeployment - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] """ - super(PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties self.tags = tags -class PartialJobBase(msrest.serialization.Model): +class PartialJobBase(_serialization.Model): """Mutable base definition for a job. :ivar notification_setting: Mutable notification setting for the job. 
@@ -24344,25 +24206,22 @@ class PartialJobBase(msrest.serialization.Model): """ _attribute_map = { - 'notification_setting': {'key': 'notificationSetting', 'type': 'PartialNotificationSetting'}, + "notification_setting": {"key": "notificationSetting", "type": "PartialNotificationSetting"}, } def __init__( - self, - *, - notification_setting: Optional["PartialNotificationSetting"] = None, - **kwargs - ): + self, *, notification_setting: Optional["_models.PartialNotificationSetting"] = None, **kwargs: Any + ) -> None: """ :keyword notification_setting: Mutable notification setting for the job. :paramtype notification_setting: ~azure.mgmt.machinelearningservices.models.PartialNotificationSetting """ - super(PartialJobBase, self).__init__(**kwargs) + super().__init__(**kwargs) self.notification_setting = notification_setting -class PartialJobBasePartialResource(msrest.serialization.Model): +class PartialJobBasePartialResource(_serialization.Model): """Azure Resource Manager resource envelope strictly used in update requests. :ivar properties: Additional attributes of the entity. @@ -24370,156 +24229,140 @@ class PartialJobBasePartialResource(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'PartialJobBase'}, + "properties": {"key": "properties", "type": "PartialJobBase"}, } - def __init__( - self, - *, - properties: Optional["PartialJobBase"] = None, - **kwargs - ): + def __init__(self, *, properties: Optional["_models.PartialJobBase"] = None, **kwargs: Any) -> None: """ :keyword properties: Additional attributes of the entity. :paramtype properties: ~azure.mgmt.machinelearningservices.models.PartialJobBase """ - super(PartialJobBasePartialResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class PartialManagedServiceIdentity(msrest.serialization.Model): +class PartialManagedServiceIdentity(_serialization.Model): """Managed service identity (system assigned and/or user assigned identities). - :ivar type: Managed service identity (system assigned and/or user assigned identities). - Possible values include: "None", "SystemAssigned", "UserAssigned", - "SystemAssigned,UserAssigned". + :ivar type: Managed service identity (system assigned and/or user assigned identities). Known + values are: "None", "SystemAssigned", "UserAssigned", and "SystemAssigned,UserAssigned". :vartype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType :ivar user_assigned_identities: The set of user assigned identities associated with the resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. The dictionary values can be empty objects ({}) in requests. 
- :vartype user_assigned_identities: dict[str, any] + :vartype user_assigned_identities: dict[str, JSON] """ _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{object}'}, + "type": {"key": "type", "type": "str"}, + "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{object}"}, } def __init__( self, *, - type: Optional[Union[str, "ManagedServiceIdentityType"]] = None, - user_assigned_identities: Optional[Dict[str, Any]] = None, - **kwargs - ): + type: Optional[Union[str, "_models.ManagedServiceIdentityType"]] = None, + user_assigned_identities: Optional[Dict[str, JSON]] = None, + **kwargs: Any + ) -> None: """ :keyword type: Managed service identity (system assigned and/or user assigned identities). - Possible values include: "None", "SystemAssigned", "UserAssigned", - "SystemAssigned,UserAssigned". + Known values are: "None", "SystemAssigned", "UserAssigned", and "SystemAssigned,UserAssigned". :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType :keyword user_assigned_identities: The set of user assigned identities associated with the resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}. The dictionary values can be empty objects ({}) in requests. - :paramtype user_assigned_identities: dict[str, any] + :paramtype user_assigned_identities: dict[str, JSON] """ - super(PartialManagedServiceIdentity, self).__init__(**kwargs) + super().__init__(**kwargs) self.type = type self.user_assigned_identities = user_assigned_identities -class PartialMinimalTrackedResource(msrest.serialization.Model): +class PartialMinimalTrackedResource(_serialization.Model): """Strictly used in update requests. - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] """ _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, + "tags": {"key": "tags", "type": "{str}"}, } - def __init__( - self, - *, - tags: Optional[Dict[str, str]] = None, - **kwargs - ): + def __init__(self, *, tags: Optional[Dict[str, str]] = None, **kwargs: Any) -> None: """ - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] """ - super(PartialMinimalTrackedResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.tags = tags class PartialMinimalTrackedResourceWithIdentity(PartialMinimalTrackedResource): """Strictly used in update requests. - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar identity: Managed service identity (system assigned and/or user assigned identities). :vartype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity """ _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'PartialManagedServiceIdentity'}, + "tags": {"key": "tags", "type": "{str}"}, + "identity": {"key": "identity", "type": "PartialManagedServiceIdentity"}, } def __init__( self, *, tags: Optional[Dict[str, str]] = None, - identity: Optional["PartialManagedServiceIdentity"] = None, - **kwargs - ): + identity: Optional["_models.PartialManagedServiceIdentity"] = None, + **kwargs: Any + ) -> None: """ - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. 
:paramtype tags: dict[str, str] :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity """ - super(PartialMinimalTrackedResourceWithIdentity, self).__init__(tags=tags, **kwargs) + super().__init__(tags=tags, **kwargs) self.identity = identity class PartialMinimalTrackedResourceWithSku(PartialMinimalTrackedResource): """Strictly used in update requests. - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar sku: Sku details required for ARM contract for Autoscaling. :vartype sku: ~azure.mgmt.machinelearningservices.models.PartialSku """ _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'sku': {'key': 'sku', 'type': 'PartialSku'}, + "tags": {"key": "tags", "type": "{str}"}, + "sku": {"key": "sku", "type": "PartialSku"}, } def __init__( - self, - *, - tags: Optional[Dict[str, str]] = None, - sku: Optional["PartialSku"] = None, - **kwargs - ): + self, *, tags: Optional[Dict[str, str]] = None, sku: Optional["_models.PartialSku"] = None, **kwargs: Any + ) -> None: """ - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword sku: Sku details required for ARM contract for Autoscaling. :paramtype sku: ~azure.mgmt.machinelearningservices.models.PartialSku """ - super(PartialMinimalTrackedResourceWithSku, self).__init__(tags=tags, **kwargs) + super().__init__(tags=tags, **kwargs) self.sku = sku class PartialMinimalTrackedResourceWithSkuAndIdentity(PartialMinimalTrackedResource): """Strictly used in update requests. - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] :ivar identity: Managed service identity (system assigned and/or user assigned identities). :vartype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity @@ -24528,33 +24371,33 @@ class PartialMinimalTrackedResourceWithSkuAndIdentity(PartialMinimalTrackedResou """ _attribute_map = { - 'tags': {'key': 'tags', 'type': '{str}'}, - 'identity': {'key': 'identity', 'type': 'PartialManagedServiceIdentity'}, - 'sku': {'key': 'sku', 'type': 'PartialSku'}, + "tags": {"key": "tags", "type": "{str}"}, + "identity": {"key": "identity", "type": "PartialManagedServiceIdentity"}, + "sku": {"key": "sku", "type": "PartialSku"}, } def __init__( self, *, tags: Optional[Dict[str, str]] = None, - identity: Optional["PartialManagedServiceIdentity"] = None, - sku: Optional["PartialSku"] = None, - **kwargs - ): + identity: Optional["_models.PartialManagedServiceIdentity"] = None, + sku: Optional["_models.PartialSku"] = None, + **kwargs: Any + ) -> None: """ - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.PartialManagedServiceIdentity :keyword sku: Sku details required for ARM contract for Autoscaling. :paramtype sku: ~azure.mgmt.machinelearningservices.models.PartialSku """ - super(PartialMinimalTrackedResourceWithSkuAndIdentity, self).__init__(tags=tags, **kwargs) + super().__init__(tags=tags, **kwargs) self.identity = identity self.sku = sku -class PartialNotificationSetting(msrest.serialization.Model): +class PartialNotificationSetting(_serialization.Model): """Mutable configuration for notification. 
:ivar webhooks: Send webhook callback to a service. Key is a user-provided name for the @@ -24563,25 +24406,20 @@ class PartialNotificationSetting(msrest.serialization.Model): """ _attribute_map = { - 'webhooks': {'key': 'webhooks', 'type': '{Webhook}'}, + "webhooks": {"key": "webhooks", "type": "{Webhook}"}, } - def __init__( - self, - *, - webhooks: Optional[Dict[str, "Webhook"]] = None, - **kwargs - ): + def __init__(self, *, webhooks: Optional[Dict[str, "_models.Webhook"]] = None, **kwargs: Any) -> None: """ :keyword webhooks: Send webhook callback to a service. Key is a user-provided name for the webhook. :paramtype webhooks: dict[str, ~azure.mgmt.machinelearningservices.models.Webhook] """ - super(PartialNotificationSetting, self).__init__(**kwargs) + super().__init__(**kwargs) self.webhooks = webhooks -class PartialRegistryPartialTrackedResource(msrest.serialization.Model): +class PartialRegistryPartialTrackedResource(_serialization.Model): """Strictly used in update requests. :ivar identity: Managed service identity (system assigned and/or user assigned identities). @@ -24589,40 +24427,40 @@ class PartialRegistryPartialTrackedResource(msrest.serialization.Model): ~azure.mgmt.machinelearningservices.models.RegistryPartialManagedServiceIdentity :ivar sku: Sku details required for ARM contract for Autoscaling. :vartype sku: ~azure.mgmt.machinelearningservices.models.PartialSku - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] """ _attribute_map = { - 'identity': {'key': 'identity', 'type': 'RegistryPartialManagedServiceIdentity'}, - 'sku': {'key': 'sku', 'type': 'PartialSku'}, - 'tags': {'key': 'tags', 'type': '{str}'}, + "identity": {"key": "identity", "type": "RegistryPartialManagedServiceIdentity"}, + "sku": {"key": "sku", "type": "PartialSku"}, + "tags": {"key": "tags", "type": "{str}"}, } def __init__( self, *, - identity: Optional["RegistryPartialManagedServiceIdentity"] = None, - sku: Optional["PartialSku"] = None, + identity: Optional["_models.RegistryPartialManagedServiceIdentity"] = None, + sku: Optional["_models.PartialSku"] = None, tags: Optional[Dict[str, str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.RegistryPartialManagedServiceIdentity :keyword sku: Sku details required for ARM contract for Autoscaling. :paramtype sku: ~azure.mgmt.machinelearningservices.models.PartialSku - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] """ - super(PartialRegistryPartialTrackedResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.identity = identity self.sku = sku self.tags = tags -class PartialSku(msrest.serialization.Model): +class PartialSku(_serialization.Model): """Common SKU definition. :ivar capacity: If the SKU supports scale out/in then the capacity integer should be included. @@ -24637,17 +24475,17 @@ class PartialSku(msrest.serialization.Model): this would be the standalone code. :vartype size: str :ivar tier: This field is required to be implemented by the Resource Provider if the service - has more than one tier, but is not required on a PUT. Possible values include: "Free", "Basic", - "Standard", "Premium". + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", and "Premium". 
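# --- illustrative sketch, not part of the generated patch ---
# The Partial* models above exist only for PATCH/update request bodies. A sketch
# of an update payload combining tags, a system-assigned identity and a SKU,
# using the constructors shown in these hunks (import path assumed as before):
from azure.mgmt.machinelearningservices import models as _models

update_body = _models.PartialMinimalTrackedResourceWithSkuAndIdentity(
    tags={"env": "dev"},
    identity=_models.PartialManagedServiceIdentity(type="SystemAssigned"),
    sku=_models.PartialSku(name="Standard_DS3_v2", tier="Standard", capacity=2),
)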
:vartype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier """ _attribute_map = { - 'capacity': {'key': 'capacity', 'type': 'int'}, - 'family': {'key': 'family', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, + "capacity": {"key": "capacity", "type": "int"}, + "family": {"key": "family", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "size": {"key": "size", "type": "str"}, + "tier": {"key": "tier", "type": "str"}, } def __init__( @@ -24657,9 +24495,9 @@ def __init__( family: Optional[str] = None, name: Optional[str] = None, size: Optional[str] = None, - tier: Optional[Union[str, "SkuTier"]] = None, - **kwargs - ): + tier: Optional[Union[str, "_models.SkuTier"]] = None, + **kwargs: Any + ) -> None: """ :keyword capacity: If the SKU supports scale out/in then the capacity integer should be included. If scale out/in is not possible for the resource this may be omitted. @@ -24673,11 +24511,11 @@ def __init__( value, this would be the standalone code. :paramtype size: str :keyword tier: This field is required to be implemented by the Resource Provider if the service - has more than one tier, but is not required on a PUT. Possible values include: "Free", "Basic", - "Standard", "Premium". + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", and "Premium". :paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier """ - super(PartialSku, self).__init__(**kwargs) + super().__init__(**kwargs) self.capacity = capacity self.family = family self.name = name @@ -24685,7 +24523,7 @@ def __init__( self.tier = tier -class Password(msrest.serialization.Model): +class Password(_serialization.Model): """Password. Variables are only populated by the server, and will be ignored when sending a request. @@ -24697,22 +24535,18 @@ class Password(msrest.serialization.Model): """ _validation = { - 'name': {'readonly': True}, - 'value': {'readonly': True}, + "name": {"readonly": True}, + "value": {"readonly": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'value': {'key': 'value', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "value": {"key": "value", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(Password, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.name = None self.value = None @@ -24720,21 +24554,28 @@ def __init__( class PATAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """PATAuthTypeWorkspaceConnectionProperties. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", + :ivar category: Category of the connection. 
Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar created_by_workspace_arm_id: The arm id of the workspace which created this connection. + :vartype created_by_workspace_arm_id: str :ivar expiry_time: :vartype expiry_time: ~datetime.datetime + :ivar is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :vartype is_shared_to_all: bool :ivar metadata: Any object. - :vartype metadata: any + :vartype metadata: JSON :ivar target: :vartype target: str :ivar credentials: @@ -24743,122 +24584,130 @@ class PATAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """ _validation = { - 'auth_type': {'required': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionPersonalAccessToken'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionPersonalAccessToken"}, } def __init__( self, *, - category: Optional[Union[str, "ConnectionCategory"]] = None, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, expiry_time: Optional[datetime.datetime] = None, - metadata: Optional[Any] = None, + is_shared_to_all: Optional[bool] = None, + metadata: Optional[JSON] = None, target: Optional[str] = None, - credentials: Optional["WorkspaceConnectionPersonalAccessToken"] = None, - **kwargs - ): + credentials: Optional["_models.WorkspaceConnectionPersonalAccessToken"] = None, + **kwargs: Any + ) -> None: """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", + :keyword category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory :keyword expiry_time: :paramtype expiry_time: ~datetime.datetime + :keyword is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :paramtype is_shared_to_all: bool :keyword metadata: Any object. 
- :paramtype metadata: any + :paramtype metadata: JSON :keyword target: :paramtype target: str :keyword credentials: :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPersonalAccessToken """ - super(PATAuthTypeWorkspaceConnectionProperties, self).__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) - self.auth_type = 'PAT' # type: str + super().__init__( + category=category, + expiry_time=expiry_time, + is_shared_to_all=is_shared_to_all, + metadata=metadata, + target=target, + **kwargs + ) + self.auth_type: str = "PAT" self.credentials = credentials -class PendingUploadCredentialDto(msrest.serialization.Model): +class PendingUploadCredentialDto(_serialization.Model): """PendingUploadCredentialDto. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SASCredentialDto. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + SASCredentialDto All required parameters must be populated in order to send to Azure. - :ivar credential_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "SAS". + :ivar credential_type: [Required] Credential type used to authentication with storage. + Required. "SAS" :vartype credential_type: str or ~azure.mgmt.machinelearningservices.models.PendingUploadCredentialType """ _validation = { - 'credential_type': {'required': True}, + "credential_type": {"required": True}, } _attribute_map = { - 'credential_type': {'key': 'credentialType', 'type': 'str'}, + "credential_type": {"key": "credentialType", "type": "str"}, } - _subtype_map = { - 'credential_type': {'SAS': 'SASCredentialDto'} - } + _subtype_map = {"credential_type": {"SAS": "SASCredentialDto"}} - def __init__( - self, - **kwargs - ): - """ - """ - super(PendingUploadCredentialDto, self).__init__(**kwargs) - self.credential_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.credential_type: Optional[str] = None -class PendingUploadRequestDto(msrest.serialization.Model): +class PendingUploadRequestDto(_serialization.Model): """PendingUploadRequestDto. :ivar pending_upload_id: If PendingUploadId = null then random guid will be used. :vartype pending_upload_id: str - :ivar pending_upload_type: TemporaryBlobReference is the only supported type. Possible values - include: "None", "TemporaryBlobReference". + :ivar pending_upload_type: TemporaryBlobReference is the only supported type. Known values are: + "None" and "TemporaryBlobReference". :vartype pending_upload_type: str or ~azure.mgmt.machinelearningservices.models.PendingUploadType """ _attribute_map = { - 'pending_upload_id': {'key': 'pendingUploadId', 'type': 'str'}, - 'pending_upload_type': {'key': 'pendingUploadType', 'type': 'str'}, + "pending_upload_id": {"key": "pendingUploadId", "type": "str"}, + "pending_upload_type": {"key": "pendingUploadType", "type": "str"}, } def __init__( self, *, pending_upload_id: Optional[str] = None, - pending_upload_type: Optional[Union[str, "PendingUploadType"]] = None, - **kwargs - ): + pending_upload_type: Optional[Union[str, "_models.PendingUploadType"]] = None, + **kwargs: Any + ) -> None: """ :keyword pending_upload_id: If PendingUploadId = null then random guid will be used. :paramtype pending_upload_id: str - :keyword pending_upload_type: TemporaryBlobReference is the only supported type. 
Possible - values include: "None", "TemporaryBlobReference". + :keyword pending_upload_type: TemporaryBlobReference is the only supported type. Known values + are: "None" and "TemporaryBlobReference". :paramtype pending_upload_type: str or ~azure.mgmt.machinelearningservices.models.PendingUploadType """ - super(PendingUploadRequestDto, self).__init__(**kwargs) + super().__init__(**kwargs) self.pending_upload_id = pending_upload_id self.pending_upload_type = pending_upload_type -class PendingUploadResponseDto(msrest.serialization.Model): +class PendingUploadResponseDto(_serialization.Model): """PendingUploadResponseDto. :ivar blob_reference_for_consumption: Container level read, write, list SAS. @@ -24866,44 +24715,47 @@ class PendingUploadResponseDto(msrest.serialization.Model): ~azure.mgmt.machinelearningservices.models.BlobReferenceForConsumptionDto :ivar pending_upload_id: ID for this upload request. :vartype pending_upload_id: str - :ivar pending_upload_type: TemporaryBlobReference is the only supported type. Possible values - include: "None", "TemporaryBlobReference". + :ivar pending_upload_type: TemporaryBlobReference is the only supported type. Known values are: + "None" and "TemporaryBlobReference". :vartype pending_upload_type: str or ~azure.mgmt.machinelearningservices.models.PendingUploadType """ _attribute_map = { - 'blob_reference_for_consumption': {'key': 'blobReferenceForConsumption', 'type': 'BlobReferenceForConsumptionDto'}, - 'pending_upload_id': {'key': 'pendingUploadId', 'type': 'str'}, - 'pending_upload_type': {'key': 'pendingUploadType', 'type': 'str'}, + "blob_reference_for_consumption": { + "key": "blobReferenceForConsumption", + "type": "BlobReferenceForConsumptionDto", + }, + "pending_upload_id": {"key": "pendingUploadId", "type": "str"}, + "pending_upload_type": {"key": "pendingUploadType", "type": "str"}, } def __init__( self, *, - blob_reference_for_consumption: Optional["BlobReferenceForConsumptionDto"] = None, + blob_reference_for_consumption: Optional["_models.BlobReferenceForConsumptionDto"] = None, pending_upload_id: Optional[str] = None, - pending_upload_type: Optional[Union[str, "PendingUploadType"]] = None, - **kwargs - ): + pending_upload_type: Optional[Union[str, "_models.PendingUploadType"]] = None, + **kwargs: Any + ) -> None: """ :keyword blob_reference_for_consumption: Container level read, write, list SAS. :paramtype blob_reference_for_consumption: ~azure.mgmt.machinelearningservices.models.BlobReferenceForConsumptionDto :keyword pending_upload_id: ID for this upload request. :paramtype pending_upload_id: str - :keyword pending_upload_type: TemporaryBlobReference is the only supported type. Possible - values include: "None", "TemporaryBlobReference". + :keyword pending_upload_type: TemporaryBlobReference is the only supported type. Known values + are: "None" and "TemporaryBlobReference". :paramtype pending_upload_type: str or ~azure.mgmt.machinelearningservices.models.PendingUploadType """ - super(PendingUploadResponseDto, self).__init__(**kwargs) + super().__init__(**kwargs) self.blob_reference_for_consumption = blob_reference_for_consumption self.pending_upload_id = pending_upload_id self.pending_upload_type = pending_upload_type -class PersonalComputeInstanceSettings(msrest.serialization.Model): +class PersonalComputeInstanceSettings(_serialization.Model): """Settings for a personal compute instance. :ivar assigned_user: A user explicitly assigned to a personal compute instance. 
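As a usage sketch for the regenerated PendingUploadRequestDto model above (illustrative only; the import location is an assumption that mirrors the docstring cross-references):

    from azure.mgmt.machinelearningservices import models as _models  # assumed import location

    # Leaving pending_upload_id unset lets the service generate a random GUID;
    # "TemporaryBlobReference" is the only supported upload type besides "None".
    request = _models.PendingUploadRequestDto(
        pending_upload_type="TemporaryBlobReference",
    )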
@@ -24911,24 +24763,19 @@ class PersonalComputeInstanceSettings(msrest.serialization.Model): """ _attribute_map = { - 'assigned_user': {'key': 'assignedUser', 'type': 'AssignedUser'}, + "assigned_user": {"key": "assignedUser", "type": "AssignedUser"}, } - def __init__( - self, - *, - assigned_user: Optional["AssignedUser"] = None, - **kwargs - ): + def __init__(self, *, assigned_user: Optional["_models.AssignedUser"] = None, **kwargs: Any) -> None: """ :keyword assigned_user: A user explicitly assigned to a personal compute instance. :paramtype assigned_user: ~azure.mgmt.machinelearningservices.models.AssignedUser """ - super(PersonalComputeInstanceSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.assigned_user = assigned_user -class PipelineJob(JobBaseProperties): +class PipelineJob(JobBaseProperties): # pylint: disable=too-many-instance-attributes """Pipeline Job definition: defines generic to MFE attributes. Variables are only populated by the server, and will be ignored when sending a request. @@ -24939,7 +24786,7 @@ class PipelineJob(JobBaseProperties): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar component_id: ARM resource ID of the component resource. :vartype component_id: str @@ -24956,8 +24803,8 @@ class PipelineJob(JobBaseProperties): :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration :ivar is_archived: Is the asset archived?. :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". + :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType :ivar notification_setting: Notification setting for the job. :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting @@ -24967,47 +24814,47 @@ class PipelineJob(JobBaseProperties): :ivar services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus :ivar inputs: Inputs for the pipeline job. :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] :ivar jobs: Jobs construct the Pipeline Job. - :vartype jobs: dict[str, any] + :vartype jobs: dict[str, JSON] :ivar outputs: Outputs for the pipeline job. 
:vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] :ivar settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. - :vartype settings: any + :vartype settings: JSON :ivar source_job_id: ARM resource ID of source job. :vartype source_job_id: str """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, + "job_type": {"required": True}, + "status": {"readonly": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'jobs': {'key': 'jobs', 'type': '{object}'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'settings': {'key': 'settings', 'type': 'object'}, - 'source_job_id': {'key': 'sourceJobId', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + "jobs": {"key": "jobs", "type": "{object}"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "settings": {"key": "settings", "type": "object"}, + "source_job_id": {"key": "sourceJobId", "type": "str"}, } def __init__( @@ -25019,25 +24866,25 @@ def __init__( component_id: Optional[str] = None, compute_id: Optional[str] = None, display_name: Optional[str] = None, - experiment_name: Optional[str] = "Default", - identity: Optional["IdentityConfiguration"] = None, - is_archived: Optional[bool] = False, - notification_setting: Optional["NotificationSetting"] = None, - secrets_configuration: Optional[Dict[str, "SecretConfiguration"]] = None, - services: Optional[Dict[str, "JobService"]] = None, - inputs: Optional[Dict[str, "JobInput"]] = None, - jobs: Optional[Dict[str, Any]] = None, - outputs: Optional[Dict[str, "JobOutput"]] = None, - settings: Optional[Any] = None, + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + notification_setting: 
Optional["_models.NotificationSetting"] = None, + secrets_configuration: Optional[Dict[str, "_models.SecretConfiguration"]] = None, + services: Optional[Dict[str, "_models.JobService"]] = None, + inputs: Optional[Dict[str, "_models.JobInput"]] = None, + jobs: Optional[Dict[str, JSON]] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + settings: Optional[JSON] = None, source_job_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword component_id: ARM resource ID of the component resource. :paramtype component_id: str @@ -25065,16 +24912,30 @@ def __init__( :keyword inputs: Inputs for the pipeline job. :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] :keyword jobs: Jobs construct the Pipeline Job. - :paramtype jobs: dict[str, any] + :paramtype jobs: dict[str, JSON] :keyword outputs: Outputs for the pipeline job. :paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] :keyword settings: Pipeline settings, for things like ContinueRunOnStepFailure etc. - :paramtype settings: any + :paramtype settings: JSON :keyword source_job_id: ARM resource ID of source job. :paramtype source_job_id: str """ - super(PipelineJob, self).__init__(description=description, properties=properties, tags=tags, component_id=component_id, compute_id=compute_id, display_name=display_name, experiment_name=experiment_name, identity=identity, is_archived=is_archived, notification_setting=notification_setting, secrets_configuration=secrets_configuration, services=services, **kwargs) - self.job_type = 'Pipeline' # type: str + super().__init__( + description=description, + properties=properties, + tags=tags, + component_id=component_id, + compute_id=compute_id, + display_name=display_name, + experiment_name=experiment_name, + identity=identity, + is_archived=is_archived, + notification_setting=notification_setting, + secrets_configuration=secrets_configuration, + services=services, + **kwargs + ) + self.job_type: str = "Pipeline" self.inputs = inputs self.jobs = jobs self.outputs = outputs @@ -25087,85 +24948,84 @@ class PredictionDriftMonitoringSignal(MonitoringSignalBase): All required parameters must be populated in order to send to Azure. - :ivar mode: The current notification mode for this signal. Possible values include: "Disabled", + :ivar mode: The current notification mode for this signal. Known values are: "Disabled" and "Enabled". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :ivar properties: Property dictionary. Properties can be added, but not removed or altered. :vartype properties: dict[str, str] - :ivar signal_type: Required. [Required] Specifies the type of signal to monitor.Constant filled - by server. Possible values include: "DataDrift", "PredictionDrift", "DataQuality", - "FeatureAttributionDrift", "Custom", "ModelPerformance", "GenerationSafetyQuality", - "GenerationTokenStatistics". + :ivar signal_type: [Required] Specifies the type of signal to monitor. Required. 
Known values + are: "DataDrift", "PredictionDrift", "DataQuality", "FeatureAttributionDrift", "Custom", + "ModelPerformance", "GenerationSafetyQuality", and "GenerationTokenStatistics". :vartype signal_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringSignalType - :ivar metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. + :ivar metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. :vartype metric_thresholds: list[~azure.mgmt.machinelearningservices.models.PredictionDriftMetricThresholdBase] - :ivar model_type: Required. [Required] The type of the model monitored. Possible values - include: "Classification", "Regression". + :ivar model_type: [Required] The type of the model monitored. Required. Known values are: + "Classification" and "Regression". :vartype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType - :ivar production_data: Required. [Required] The data which drift will be calculated for. + :ivar production_data: [Required] The data which drift will be calculated for. Required. :vartype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :ivar reference_data: Required. [Required] The data to calculate drift against. + :ivar reference_data: [Required] The data to calculate drift against. Required. :vartype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ _validation = { - 'signal_type': {'required': True}, - 'metric_thresholds': {'required': True}, - 'model_type': {'required': True}, - 'production_data': {'required': True}, - 'reference_data': {'required': True}, + "signal_type": {"required": True}, + "metric_thresholds": {"required": True}, + "model_type": {"required": True}, + "production_data": {"required": True}, + "reference_data": {"required": True}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'signal_type': {'key': 'signalType', 'type': 'str'}, - 'metric_thresholds': {'key': 'metricThresholds', 'type': '[PredictionDriftMetricThresholdBase]'}, - 'model_type': {'key': 'modelType', 'type': 'str'}, - 'production_data': {'key': 'productionData', 'type': 'MonitoringInputDataBase'}, - 'reference_data': {'key': 'referenceData', 'type': 'MonitoringInputDataBase'}, + "mode": {"key": "mode", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "signal_type": {"key": "signalType", "type": "str"}, + "metric_thresholds": {"key": "metricThresholds", "type": "[PredictionDriftMetricThresholdBase]"}, + "model_type": {"key": "modelType", "type": "str"}, + "production_data": {"key": "productionData", "type": "MonitoringInputDataBase"}, + "reference_data": {"key": "referenceData", "type": "MonitoringInputDataBase"}, } def __init__( self, *, - metric_thresholds: List["PredictionDriftMetricThresholdBase"], - model_type: Union[str, "MonitoringModelType"], - production_data: "MonitoringInputDataBase", - reference_data: "MonitoringInputDataBase", - mode: Optional[Union[str, "MonitoringNotificationMode"]] = None, + metric_thresholds: List["_models.PredictionDriftMetricThresholdBase"], + model_type: Union[str, "_models.MonitoringModelType"], + production_data: "_models.MonitoringInputDataBase", + reference_data: "_models.MonitoringInputDataBase", + mode: Optional[Union[str, "_models.MonitoringNotificationMode"]] = None, properties: Optional[Dict[str, str]] = None, - **kwargs - ): + 
**kwargs: Any + ) -> None: """ - :keyword mode: The current notification mode for this signal. Possible values include: - "Disabled", "Enabled". + :keyword mode: The current notification mode for this signal. Known values are: "Disabled" and + "Enabled". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.MonitoringNotificationMode :keyword properties: Property dictionary. Properties can be added, but not removed or altered. :paramtype properties: dict[str, str] - :keyword metric_thresholds: Required. [Required] A list of metrics to calculate and their - associated thresholds. + :keyword metric_thresholds: [Required] A list of metrics to calculate and their associated + thresholds. Required. :paramtype metric_thresholds: list[~azure.mgmt.machinelearningservices.models.PredictionDriftMetricThresholdBase] - :keyword model_type: Required. [Required] The type of the model monitored. Possible values - include: "Classification", "Regression". + :keyword model_type: [Required] The type of the model monitored. Required. Known values are: + "Classification" and "Regression". :paramtype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType - :keyword production_data: Required. [Required] The data which drift will be calculated for. + :keyword production_data: [Required] The data which drift will be calculated for. Required. :paramtype production_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase - :keyword reference_data: Required. [Required] The data to calculate drift against. + :keyword reference_data: [Required] The data to calculate drift against. Required. :paramtype reference_data: ~azure.mgmt.machinelearningservices.models.MonitoringInputDataBase """ - super(PredictionDriftMonitoringSignal, self).__init__(mode=mode, properties=properties, **kwargs) - self.signal_type = 'PredictionDrift' # type: str + super().__init__(mode=mode, properties=properties, **kwargs) + self.signal_type: str = "PredictionDrift" self.metric_thresholds = metric_thresholds self.model_type = model_type self.production_data = production_data self.reference_data = reference_data -class PrivateEndpoint(msrest.serialization.Model): +class PrivateEndpoint(_serialization.Model): """The Private Endpoint resource. Variables are only populated by the server, and will be ignored when sending a request. @@ -25175,24 +25035,20 @@ class PrivateEndpoint(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, + "id": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(PrivateEndpoint, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.id = None -class PrivateEndpointConnection(Resource): +class PrivateEndpointConnection(Resource): # pylint: disable=too-many-instance-attributes """The Private Endpoint Connection resource. Variables are only populated by the server, and will be ignored when sending a request. @@ -25215,7 +25071,7 @@ class PrivateEndpointConnection(Resource): :ivar sku: Optional. This field is required to be implemented by the RP because AML is supporting more than one tier. :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar tags: A set of tags. Dictionary of :code:``. + :ivar tags: Dictionary of :code:``. :vartype tags: dict[str, str] :ivar private_endpoint: The Private Endpoint resource. 
:vartype private_endpoint: @@ -25223,45 +25079,48 @@ class PrivateEndpointConnection(Resource): :ivar private_link_service_connection_state: The connection state. :vartype private_link_service_connection_state: ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState - :ivar provisioning_state: The current provisioning state. Possible values include: "Succeeded", - "Creating", "Deleting", "Failed". + :ivar provisioning_state: The current provisioning state. Known values are: "Succeeded", + "Creating", "Deleting", and "Failed". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionProvisioningState """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'provisioning_state': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'WorkspacePrivateEndpointResource'}, - 'private_link_service_connection_state': {'key': 'properties.privateLinkServiceConnectionState', 'type': 'PrivateLinkServiceConnectionState'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "location": {"key": "location", "type": "str"}, + "sku": {"key": "sku", "type": "Sku"}, + "tags": {"key": "tags", "type": "{str}"}, + "private_endpoint": {"key": "properties.privateEndpoint", "type": "WorkspacePrivateEndpointResource"}, + "private_link_service_connection_state": { + "key": "properties.privateLinkServiceConnectionState", + "type": "PrivateLinkServiceConnectionState", + }, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, - identity: Optional["ManagedServiceIdentity"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, location: Optional[str] = None, - sku: Optional["Sku"] = None, + sku: Optional["_models.Sku"] = None, tags: Optional[Dict[str, str]] = None, - private_endpoint: Optional["WorkspacePrivateEndpointResource"] = None, - private_link_service_connection_state: Optional["PrivateLinkServiceConnectionState"] = None, - **kwargs - ): + private_endpoint: Optional["_models.WorkspacePrivateEndpointResource"] = None, + private_link_service_connection_state: Optional["_models.PrivateLinkServiceConnectionState"] = None, + provisioning_state: Optional[Union[str, "_models.PrivateEndpointConnectionProvisioningState"]] = None, + **kwargs: Any + ) -> None: """ :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity @@ -25270,7 +25129,7 @@ def __init__( :keyword sku: Optional. 
This field is required to be implemented by the RP because AML is supporting more than one tier. :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - :keyword tags: A set of tags. Dictionary of :code:``. + :keyword tags: Dictionary of :code:``. :paramtype tags: dict[str, str] :keyword private_endpoint: The Private Endpoint resource. :paramtype private_endpoint: @@ -25278,18 +25137,22 @@ def __init__( :keyword private_link_service_connection_state: The connection state. :paramtype private_link_service_connection_state: ~azure.mgmt.machinelearningservices.models.PrivateLinkServiceConnectionState + :keyword provisioning_state: The current provisioning state. Known values are: "Succeeded", + "Creating", "Deleting", and "Failed". + :paramtype provisioning_state: str or + ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionProvisioningState """ - super(PrivateEndpointConnection, self).__init__(**kwargs) + super().__init__(**kwargs) self.identity = identity self.location = location self.sku = sku self.tags = tags self.private_endpoint = private_endpoint self.private_link_service_connection_state = private_link_service_connection_state - self.provisioning_state = None + self.provisioning_state = provisioning_state -class PrivateEndpointConnectionListResult(msrest.serialization.Model): +class PrivateEndpointConnectionListResult(_serialization.Model): """List of private endpoint connection associated with the specified workspace. :ivar value: Array of private endpoint connections. @@ -25297,42 +25160,38 @@ class PrivateEndpointConnectionListResult(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateEndpointConnection]'}, + "value": {"key": "value", "type": "[PrivateEndpointConnection]"}, } - def __init__( - self, - *, - value: Optional[List["PrivateEndpointConnection"]] = None, - **kwargs - ): + def __init__(self, *, value: Optional[List["_models.PrivateEndpointConnection"]] = None, **kwargs: Any) -> None: """ :keyword value: Array of private endpoint connections. :paramtype value: list[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] """ - super(PrivateEndpointConnectionListResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value -class PrivateEndpointDestination(msrest.serialization.Model): - """Private Endpoint destination for a Private Endpoint Outbound Rule for the managed network of a machine learning workspace. +class PrivateEndpointDestination(_serialization.Model): + """Private Endpoint destination for a Private Endpoint Outbound Rule for the managed network of a + machine learning workspace. :ivar service_resource_id: :vartype service_resource_id: str :ivar spark_enabled: :vartype spark_enabled: bool :ivar spark_status: Type of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Inactive", "Active". + Known values are: "Inactive" and "Active". 
:vartype spark_status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus :ivar subresource_target: :vartype subresource_target: str """ _attribute_map = { - 'service_resource_id': {'key': 'serviceResourceId', 'type': 'str'}, - 'spark_enabled': {'key': 'sparkEnabled', 'type': 'bool'}, - 'spark_status': {'key': 'sparkStatus', 'type': 'str'}, - 'subresource_target': {'key': 'subresourceTarget', 'type': 'str'}, + "service_resource_id": {"key": "serviceResourceId", "type": "str"}, + "spark_enabled": {"key": "sparkEnabled", "type": "bool"}, + "spark_status": {"key": "sparkStatus", "type": "str"}, + "subresource_target": {"key": "subresourceTarget", "type": "str"}, } def __init__( @@ -25340,22 +25199,22 @@ def __init__( *, service_resource_id: Optional[str] = None, spark_enabled: Optional[bool] = None, - spark_status: Optional[Union[str, "RuleStatus"]] = None, + spark_status: Optional[Union[str, "_models.RuleStatus"]] = None, subresource_target: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword service_resource_id: :paramtype service_resource_id: str :keyword spark_enabled: :paramtype spark_enabled: bool :keyword spark_status: Type of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Inactive", "Active". + Known values are: "Inactive" and "Active". :paramtype spark_status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus :keyword subresource_target: :paramtype subresource_target: str """ - super(PrivateEndpointDestination, self).__init__(**kwargs) + super().__init__(**kwargs) self.service_resource_id = service_resource_id self.spark_enabled = spark_enabled self.spark_status = spark_status @@ -25367,15 +25226,14 @@ class PrivateEndpointOutboundRule(OutboundRule): All required parameters must be populated in order to send to Azure. - :ivar category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". + :ivar category: Category of a managed network outbound rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". :vartype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :ivar status: Type of a managed network Outbound Rule of a machine learning workspace. Possible - values include: "Inactive", "Active". + :ivar status: Type of a managed network outbound rule of a machine learning workspace. Known + values are: "Inactive" and "Active". :vartype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus - :ivar type: Required. Type of a managed network Outbound Rule of a machine learning - workspace.Constant filled by server. Possible values include: "FQDN", "PrivateEndpoint", - "ServiceTag". + :ivar type: Type of a managed network outbound rule of a machine learning workspace. Required. + Known values are: "FQDN", "PrivateEndpoint", and "ServiceTag". :vartype type: str or ~azure.mgmt.machinelearningservices.models.RuleType :ivar destination: Private Endpoint destination for a Private Endpoint Outbound Rule for the managed network of a machine learning workspace. 
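A sketch of a user-defined private-endpoint outbound rule composed from the two models above (illustrative only; the import location, ARM resource ID, and sub-resource name are assumptions):

    from azure.mgmt.machinelearningservices import models as _models  # assumed import location

    destination = _models.PrivateEndpointDestination(
        service_resource_id=(
            "/subscriptions/<sub>/resourceGroups/<rg>/providers/"
            "Microsoft.Storage/storageAccounts/<account>"  # hypothetical target resource
        ),
        spark_enabled=False,
        subresource_target="blob",  # hypothetical sub-resource
    )
    rule = _models.PrivateEndpointOutboundRule(
        category="UserDefined",  # known values: "Required", "Recommended", "UserDefined"
        destination=destination,
    )
    # The constructor pins the discriminator, so rule.type == "PrivateEndpoint".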
@@ -25383,37 +25241,37 @@ class PrivateEndpointOutboundRule(OutboundRule): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'destination': {'key': 'destination', 'type': 'PrivateEndpointDestination'}, + "category": {"key": "category", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "destination": {"key": "destination", "type": "PrivateEndpointDestination"}, } def __init__( self, *, - category: Optional[Union[str, "RuleCategory"]] = None, - status: Optional[Union[str, "RuleStatus"]] = None, - destination: Optional["PrivateEndpointDestination"] = None, - **kwargs - ): + category: Optional[Union[str, "_models.RuleCategory"]] = None, + status: Optional[Union[str, "_models.RuleStatus"]] = None, + destination: Optional["_models.PrivateEndpointDestination"] = None, + **kwargs: Any + ) -> None: """ - :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". + :keyword category: Category of a managed network outbound rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :keyword status: Type of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Inactive", "Active". + :keyword status: Type of a managed network outbound rule of a machine learning workspace. Known + values are: "Inactive" and "Active". :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus :keyword destination: Private Endpoint destination for a Private Endpoint Outbound Rule for the managed network of a machine learning workspace. :paramtype destination: ~azure.mgmt.machinelearningservices.models.PrivateEndpointDestination """ - super(PrivateEndpointOutboundRule, self).__init__(category=category, status=status, **kwargs) - self.type = 'PrivateEndpoint' # type: str + super().__init__(category=category, status=status, **kwargs) + self.type: str = "PrivateEndpoint" self.destination = destination @@ -25429,29 +25287,24 @@ class PrivateEndpointResource(PrivateEndpoint): """ _validation = { - 'id': {'readonly': True}, + "id": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "subnet_arm_id": {"key": "subnetArmId", "type": "str"}, } - def __init__( - self, - *, - subnet_arm_id: Optional[str] = None, - **kwargs - ): + def __init__(self, *, subnet_arm_id: Optional[str] = None, **kwargs: Any) -> None: """ :keyword subnet_arm_id: The subnetId that the private endpoint is connected to. :paramtype subnet_arm_id: str """ - super(PrivateEndpointResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.subnet_arm_id = subnet_arm_id -class PrivateLinkResource(Resource): +class PrivateLinkResource(Resource): # pylint: disable=too-many-instance-attributes """A private link resource. Variables are only populated by the server, and will be ignored when sending a request. @@ -25474,7 +25327,7 @@ class PrivateLinkResource(Resource): :ivar sku: Optional. This field is required to be implemented by the RP because AML is supporting more than one tier. 
:vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar tags: A set of tags. Dictionary of :code:``. + :ivar tags: Dictionary of :code:``. :vartype tags: dict[str, str] :ivar group_id: The private link resource group id. :vartype group_id: str @@ -25485,38 +25338,38 @@ class PrivateLinkResource(Resource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'location': {'key': 'location', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'required_members': {'key': 'properties.requiredMembers', 'type': '[str]'}, - 'required_zone_names': {'key': 'properties.requiredZoneNames', 'type': '[str]'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "location": {"key": "location", "type": "str"}, + "sku": {"key": "sku", "type": "Sku"}, + "tags": {"key": "tags", "type": "{str}"}, + "group_id": {"key": "properties.groupId", "type": "str"}, + "required_members": {"key": "properties.requiredMembers", "type": "[str]"}, + "required_zone_names": {"key": "properties.requiredZoneNames", "type": "[str]"}, } def __init__( self, *, - identity: Optional["ManagedServiceIdentity"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, location: Optional[str] = None, - sku: Optional["Sku"] = None, + sku: Optional["_models.Sku"] = None, tags: Optional[Dict[str, str]] = None, group_id: Optional[str] = None, required_members: Optional[List[str]] = None, required_zone_names: Optional[List[str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity @@ -25525,7 +25378,7 @@ def __init__( :keyword sku: Optional. This field is required to be implemented by the RP because AML is supporting more than one tier. :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - :keyword tags: A set of tags. Dictionary of :code:``. + :keyword tags: Dictionary of :code:``. :paramtype tags: dict[str, str] :keyword group_id: The private link resource group id. :paramtype group_id: str @@ -25534,7 +25387,7 @@ def __init__( :keyword required_zone_names: The private link resource Private link DNS zone name. :paramtype required_zone_names: list[str] """ - super(PrivateLinkResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.identity = identity self.location = location self.sku = sku @@ -25544,7 +25397,7 @@ def __init__( self.required_zone_names = required_zone_names -class PrivateLinkResourceListResult(msrest.serialization.Model): +class PrivateLinkResourceListResult(_serialization.Model): """A list of private link resources. 
:ivar value: @@ -25552,41 +25405,37 @@ class PrivateLinkResourceListResult(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[PrivateLinkResource]'}, + "value": {"key": "value", "type": "[PrivateLinkResource]"}, } - def __init__( - self, - *, - value: Optional[List["PrivateLinkResource"]] = None, - **kwargs - ): + def __init__(self, *, value: Optional[List["_models.PrivateLinkResource"]] = None, **kwargs: Any) -> None: """ :keyword value: :paramtype value: list[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] """ - super(PrivateLinkResourceListResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value -class PrivateLinkServiceConnectionState(msrest.serialization.Model): - """A collection of information about the state of the connection between service consumer and provider. +class PrivateLinkServiceConnectionState(_serialization.Model): + """A collection of information about the state of the connection between service consumer and + provider. :ivar actions_required: Some RP chose "None". Other RPs use this for region expansion. :vartype actions_required: str :ivar description: User-defined message that, per NRP doc, may be used for approval-related message. :vartype description: str - :ivar status: Connection status of the service consumer with the service provider. Possible - values include: "Approved", "Pending", "Rejected", "Disconnected", "Timeout". + :ivar status: Connection status of the service consumer with the service provider. Known values + are: "Approved", "Pending", "Rejected", "Disconnected", and "Timeout". :vartype status: str or ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ _attribute_map = { - 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + "actions_required": {"key": "actionsRequired", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "status": {"key": "status", "type": "str"}, } def __init__( @@ -25594,27 +25443,27 @@ def __init__( *, actions_required: Optional[str] = None, description: Optional[str] = None, - status: Optional[Union[str, "EndpointServiceConnectionStatus"]] = None, - **kwargs - ): + status: Optional[Union[str, "_models.EndpointServiceConnectionStatus"]] = None, + **kwargs: Any + ) -> None: """ :keyword actions_required: Some RP chose "None". Other RPs use this for region expansion. :paramtype actions_required: str :keyword description: User-defined message that, per NRP doc, may be used for approval-related message. :paramtype description: str - :keyword status: Connection status of the service consumer with the service provider. Possible - values include: "Approved", "Pending", "Rejected", "Disconnected", "Timeout". + :keyword status: Connection status of the service consumer with the service provider. Known + values are: "Approved", "Pending", "Rejected", "Disconnected", and "Timeout". :paramtype status: str or ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ - super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) + super().__init__(**kwargs) self.actions_required = actions_required self.description = description self.status = status -class ProbeSettings(msrest.serialization.Model): +class ProbeSettings(_serialization.Model): """Deployment container liveness/readiness probe configuration. 
:ivar failure_threshold: The number of failures to allow before returning an unhealthy status. @@ -25630,23 +25479,23 @@ class ProbeSettings(msrest.serialization.Model): """ _attribute_map = { - 'failure_threshold': {'key': 'failureThreshold', 'type': 'int'}, - 'initial_delay': {'key': 'initialDelay', 'type': 'duration'}, - 'period': {'key': 'period', 'type': 'duration'}, - 'success_threshold': {'key': 'successThreshold', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, + "failure_threshold": {"key": "failureThreshold", "type": "int"}, + "initial_delay": {"key": "initialDelay", "type": "duration"}, + "period": {"key": "period", "type": "duration"}, + "success_threshold": {"key": "successThreshold", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, } def __init__( self, *, - failure_threshold: Optional[int] = 30, + failure_threshold: int = 30, initial_delay: Optional[datetime.timedelta] = None, - period: Optional[datetime.timedelta] = "PT10S", - success_threshold: Optional[int] = 1, - timeout: Optional[datetime.timedelta] = "PT2S", - **kwargs - ): + period: datetime.timedelta = "PT10S", + success_threshold: int = 1, + timeout: datetime.timedelta = "PT2S", + **kwargs: Any + ) -> None: """ :keyword failure_threshold: The number of failures to allow before returning an unhealthy status. @@ -25660,7 +25509,7 @@ def __init__( :keyword timeout: The probe timeout in ISO 8601 format. :paramtype timeout: ~datetime.timedelta """ - super(ProbeSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.failure_threshold = failure_threshold self.initial_delay = initial_delay self.period = period @@ -25668,43 +25517,39 @@ def __init__( self.timeout = timeout -class ProgressMetrics(msrest.serialization.Model): +class ProgressMetrics(_serialization.Model): """Progress metrics definition. Variables are only populated by the server, and will be ignored when sending a request. :ivar completed_datapoint_count: The completed datapoint count. - :vartype completed_datapoint_count: long + :vartype completed_datapoint_count: int :ivar incremental_data_last_refresh_date_time: The time of last successful incremental data refresh in UTC. :vartype incremental_data_last_refresh_date_time: ~datetime.datetime :ivar skipped_datapoint_count: The skipped datapoint count. - :vartype skipped_datapoint_count: long + :vartype skipped_datapoint_count: int :ivar total_datapoint_count: The total datapoint count. 
- :vartype total_datapoint_count: long + :vartype total_datapoint_count: int """ _validation = { - 'completed_datapoint_count': {'readonly': True}, - 'incremental_data_last_refresh_date_time': {'readonly': True}, - 'skipped_datapoint_count': {'readonly': True}, - 'total_datapoint_count': {'readonly': True}, + "completed_datapoint_count": {"readonly": True}, + "incremental_data_last_refresh_date_time": {"readonly": True}, + "skipped_datapoint_count": {"readonly": True}, + "total_datapoint_count": {"readonly": True}, } _attribute_map = { - 'completed_datapoint_count': {'key': 'completedDatapointCount', 'type': 'long'}, - 'incremental_data_last_refresh_date_time': {'key': 'incrementalDataLastRefreshDateTime', 'type': 'iso-8601'}, - 'skipped_datapoint_count': {'key': 'skippedDatapointCount', 'type': 'long'}, - 'total_datapoint_count': {'key': 'totalDatapointCount', 'type': 'long'}, + "completed_datapoint_count": {"key": "completedDatapointCount", "type": "int"}, + "incremental_data_last_refresh_date_time": {"key": "incrementalDataLastRefreshDateTime", "type": "iso-8601"}, + "skipped_datapoint_count": {"key": "skippedDatapointCount", "type": "int"}, + "total_datapoint_count": {"key": "totalDatapointCount", "type": "int"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ProgressMetrics, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.completed_datapoint_count = None self.incremental_data_last_refresh_date_time = None self.skipped_datapoint_count = None @@ -25716,73 +25561,63 @@ class PyTorch(DistributionConfiguration): All required parameters must be populated in order to send to Azure. - :ivar distribution_type: Required. [Required] Specifies the type of distribution - framework.Constant filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi", - "Ray". + :ivar distribution_type: [Required] Specifies the type of distribution framework. Required. + Known values are: "PyTorch", "TensorFlow", "Mpi", and "Ray". :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType :ivar process_count_per_instance: Number of processes per node. :vartype process_count_per_instance: int """ _validation = { - 'distribution_type': {'required': True}, + "distribution_type": {"required": True}, } _attribute_map = { - 'distribution_type': {'key': 'distributionType', 'type': 'str'}, - 'process_count_per_instance': {'key': 'processCountPerInstance', 'type': 'int'}, + "distribution_type": {"key": "distributionType", "type": "str"}, + "process_count_per_instance": {"key": "processCountPerInstance", "type": "int"}, } - def __init__( - self, - *, - process_count_per_instance: Optional[int] = None, - **kwargs - ): + def __init__(self, *, process_count_per_instance: Optional[int] = None, **kwargs: Any) -> None: """ :keyword process_count_per_instance: Number of processes per node. :paramtype process_count_per_instance: int """ - super(PyTorch, self).__init__(**kwargs) - self.distribution_type = 'PyTorch' # type: str + super().__init__(**kwargs) + self.distribution_type: str = "PyTorch" self.process_count_per_instance = process_count_per_instance -class QueueSettings(msrest.serialization.Model): +class QueueSettings(_serialization.Model): """QueueSettings. - :ivar job_tier: Controls the compute job tier. Possible values include: "Null", "Spot", - "Basic", "Standard", "Premium". + :ivar job_tier: Controls the compute job tier. Known values are: "Null", "Spot", "Basic", + "Standard", and "Premium". 
:vartype job_tier: str or ~azure.mgmt.machinelearningservices.models.JobTier :ivar priority: Controls the priority of the job on a compute. :vartype priority: int """ _attribute_map = { - 'job_tier': {'key': 'jobTier', 'type': 'str'}, - 'priority': {'key': 'priority', 'type': 'int'}, + "job_tier": {"key": "jobTier", "type": "str"}, + "priority": {"key": "priority", "type": "int"}, } def __init__( - self, - *, - job_tier: Optional[Union[str, "JobTier"]] = None, - priority: Optional[int] = None, - **kwargs - ): + self, *, job_tier: Optional[Union[str, "_models.JobTier"]] = None, priority: Optional[int] = None, **kwargs: Any + ) -> None: """ - :keyword job_tier: Controls the compute job tier. Possible values include: "Null", "Spot", - "Basic", "Standard", "Premium". + :keyword job_tier: Controls the compute job tier. Known values are: "Null", "Spot", "Basic", + "Standard", and "Premium". :paramtype job_tier: str or ~azure.mgmt.machinelearningservices.models.JobTier :keyword priority: Controls the priority of the job on a compute. :paramtype priority: int """ - super(QueueSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.job_tier = job_tier self.priority = priority -class QuotaBaseProperties(msrest.serialization.Model): +class QuotaBaseProperties(_serialization.Model): """The properties for Quota update or retrieval. :ivar id: Specifies the resource ID. @@ -25790,46 +25625,45 @@ class QuotaBaseProperties(msrest.serialization.Model): :ivar type: Specifies the resource type. :vartype type: str :ivar limit: The maximum permitted quota of the resource. - :vartype limit: long - :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". + :vartype limit: int + :ivar unit: An enum describing the unit of quota measurement. "Count" :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit """ _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "limit": {"key": "limit", "type": "int"}, + "unit": {"key": "unit", "type": "str"}, } def __init__( self, *, - id: Optional[str] = None, + id: Optional[str] = None, # pylint: disable=redefined-builtin type: Optional[str] = None, limit: Optional[int] = None, - unit: Optional[Union[str, "QuotaUnit"]] = None, - **kwargs - ): + unit: Optional[Union[str, "_models.QuotaUnit"]] = None, + **kwargs: Any + ) -> None: """ :keyword id: Specifies the resource ID. :paramtype id: str :keyword type: Specifies the resource type. :paramtype type: str :keyword limit: The maximum permitted quota of the resource. - :paramtype limit: long - :keyword unit: An enum describing the unit of quota measurement. Possible values include: - "Count". + :paramtype limit: int + :keyword unit: An enum describing the unit of quota measurement. "Count" :paramtype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit """ - super(QuotaBaseProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.id = id self.type = type self.limit = limit self.unit = unit -class QuotaUpdateParameters(msrest.serialization.Model): +class QuotaUpdateParameters(_serialization.Model): """Quota update parameters. :ivar value: The list for update quota. 
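As a sketch of how the quota models compose (illustrative only; the import location and the resource ID/type strings are assumptions), a quota update wraps QuotaBaseProperties entries in the QuotaUpdateParameters model defined just below:

    from azure.mgmt.machinelearningservices import models as _models  # assumed import location

    quota = _models.QuotaBaseProperties(
        id="/subscriptions/<sub>/locations/eastus/quotas/<family>",  # hypothetical resource ID
        type="Microsoft.MachineLearningServices/locations/quotas",   # hypothetical resource type
        limit=100,
        unit="Count",  # the only documented QuotaUnit value
    )
    update = _models.QuotaUpdateParameters(value=[quota], location="eastus")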
@@ -25839,24 +25673,24 @@ class QuotaUpdateParameters(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[QuotaBaseProperties]'}, - 'location': {'key': 'location', 'type': 'str'}, + "value": {"key": "value", "type": "[QuotaBaseProperties]"}, + "location": {"key": "location", "type": "str"}, } def __init__( self, *, - value: Optional[List["QuotaBaseProperties"]] = None, + value: Optional[List["_models.QuotaBaseProperties"]] = None, location: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword value: The list for update quota. :paramtype value: list[~azure.mgmt.machinelearningservices.models.QuotaBaseProperties] :keyword location: Region of workspace quota to be updated. :paramtype location: str """ - super(QuotaUpdateParameters, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value self.location = location @@ -25866,51 +25700,50 @@ class RandomSamplingAlgorithm(SamplingAlgorithm): All required parameters must be populated in order to send to Azure. - :ivar sampling_algorithm_type: Required. [Required] The algorithm used for generating - hyperparameter values, along with configuration properties.Constant filled by server. Possible - values include: "Grid", "Random", "Bayesian". + :ivar sampling_algorithm_type: [Required] The algorithm used for generating hyperparameter + values, along with configuration properties. Required. Known values are: "Grid", "Random", and + "Bayesian". :vartype sampling_algorithm_type: str or ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType :ivar logbase: An optional positive number or e in string format to be used as base for log based random sampling. :vartype logbase: str - :ivar rule: The specific type of random algorithm. Possible values include: "Random", "Sobol". + :ivar rule: The specific type of random algorithm. Known values are: "Random" and "Sobol". :vartype rule: str or ~azure.mgmt.machinelearningservices.models.RandomSamplingAlgorithmRule :ivar seed: An optional integer to use as the seed for random number generation. :vartype seed: int """ _validation = { - 'sampling_algorithm_type': {'required': True}, + "sampling_algorithm_type": {"required": True}, } _attribute_map = { - 'sampling_algorithm_type': {'key': 'samplingAlgorithmType', 'type': 'str'}, - 'logbase': {'key': 'logbase', 'type': 'str'}, - 'rule': {'key': 'rule', 'type': 'str'}, - 'seed': {'key': 'seed', 'type': 'int'}, + "sampling_algorithm_type": {"key": "samplingAlgorithmType", "type": "str"}, + "logbase": {"key": "logbase", "type": "str"}, + "rule": {"key": "rule", "type": "str"}, + "seed": {"key": "seed", "type": "int"}, } def __init__( self, *, logbase: Optional[str] = None, - rule: Optional[Union[str, "RandomSamplingAlgorithmRule"]] = None, + rule: Optional[Union[str, "_models.RandomSamplingAlgorithmRule"]] = None, seed: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword logbase: An optional positive number or e in string format to be used as base for log based random sampling. :paramtype logbase: str - :keyword rule: The specific type of random algorithm. Possible values include: "Random", - "Sobol". + :keyword rule: The specific type of random algorithm. Known values are: "Random" and "Sobol". :paramtype rule: str or ~azure.mgmt.machinelearningservices.models.RandomSamplingAlgorithmRule :keyword seed: An optional integer to use as the seed for random number generation. 
:paramtype seed: int """ - super(RandomSamplingAlgorithm, self).__init__(**kwargs) - self.sampling_algorithm_type = 'Random' # type: str + super().__init__(**kwargs) + self.sampling_algorithm_type: str = "Random" self.logbase = logbase self.rule = rule self.seed = seed @@ -25921,9 +25754,8 @@ class Ray(DistributionConfiguration): All required parameters must be populated in order to send to Azure. - :ivar distribution_type: Required. [Required] Specifies the type of distribution - framework.Constant filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi", - "Ray". + :ivar distribution_type: [Required] Specifies the type of distribution framework. Required. + Known values are: "PyTorch", "TensorFlow", "Mpi", and "Ray". :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType :ivar address: The address of Ray head node. :vartype address: str @@ -25940,17 +25772,17 @@ class Ray(DistributionConfiguration): """ _validation = { - 'distribution_type': {'required': True}, + "distribution_type": {"required": True}, } _attribute_map = { - 'distribution_type': {'key': 'distributionType', 'type': 'str'}, - 'address': {'key': 'address', 'type': 'str'}, - 'dashboard_port': {'key': 'dashboardPort', 'type': 'int'}, - 'head_node_additional_args': {'key': 'headNodeAdditionalArgs', 'type': 'str'}, - 'include_dashboard': {'key': 'includeDashboard', 'type': 'bool'}, - 'port': {'key': 'port', 'type': 'int'}, - 'worker_node_additional_args': {'key': 'workerNodeAdditionalArgs', 'type': 'str'}, + "distribution_type": {"key": "distributionType", "type": "str"}, + "address": {"key": "address", "type": "str"}, + "dashboard_port": {"key": "dashboardPort", "type": "int"}, + "head_node_additional_args": {"key": "headNodeAdditionalArgs", "type": "str"}, + "include_dashboard": {"key": "includeDashboard", "type": "bool"}, + "port": {"key": "port", "type": "int"}, + "worker_node_additional_args": {"key": "workerNodeAdditionalArgs", "type": "str"}, } def __init__( @@ -25962,8 +25794,8 @@ def __init__( include_dashboard: Optional[bool] = None, port: Optional[int] = None, worker_node_additional_args: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword address: The address of Ray head node. :paramtype address: str @@ -25978,8 +25810,8 @@ def __init__( :keyword worker_node_additional_args: Additional arguments passed to ray start in worker node. :paramtype worker_node_additional_args: str """ - super(Ray, self).__init__(**kwargs) - self.distribution_type = 'Ray' # type: str + super().__init__(**kwargs) + self.distribution_type: str = "Ray" self.address = address self.dashboard_port = dashboard_port self.head_node_additional_args = head_node_additional_args @@ -25988,11 +25820,11 @@ def __init__( self.worker_node_additional_args = worker_node_additional_args -class Recurrence(msrest.serialization.Model): +class Recurrence(_serialization.Model): """The workflow trigger recurrence for ComputeStartStop schedule type. - :ivar frequency: [Required] The frequency to trigger schedule. Possible values include: - "Minute", "Hour", "Day", "Week", "Month". + :ivar frequency: [Required] The frequency to trigger schedule. Known values are: "Minute", + "Hour", "Day", "Week", and "Month". :vartype frequency: str or ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency :ivar interval: [Required] Specifies schedule interval in conjunction with frequency. 
:vartype interval: int @@ -26007,26 +25839,26 @@ class Recurrence(msrest.serialization.Model): """ _attribute_map = { - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'interval': {'key': 'interval', 'type': 'int'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + "frequency": {"key": "frequency", "type": "str"}, + "interval": {"key": "interval", "type": "int"}, + "start_time": {"key": "startTime", "type": "str"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "schedule": {"key": "schedule", "type": "RecurrenceSchedule"}, } def __init__( self, *, - frequency: Optional[Union[str, "RecurrenceFrequency"]] = None, + frequency: Optional[Union[str, "_models.RecurrenceFrequency"]] = None, interval: Optional[int] = None, start_time: Optional[str] = None, - time_zone: Optional[str] = "UTC", - schedule: Optional["RecurrenceSchedule"] = None, - **kwargs - ): + time_zone: str = "UTC", + schedule: Optional["_models.RecurrenceSchedule"] = None, + **kwargs: Any + ) -> None: """ - :keyword frequency: [Required] The frequency to trigger schedule. Possible values include: - "Minute", "Hour", "Day", "Week", "Month". + :keyword frequency: [Required] The frequency to trigger schedule. Known values are: "Minute", + "Hour", "Day", "Week", and "Month". :paramtype frequency: str or ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency :keyword interval: [Required] Specifies schedule interval in conjunction with frequency. :paramtype interval: int @@ -26039,7 +25871,7 @@ def __init__( :keyword schedule: [Required] The recurrence schedule. :paramtype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule """ - super(Recurrence, self).__init__(**kwargs) + super().__init__(**kwargs) self.frequency = frequency self.interval = interval self.start_time = start_time @@ -26047,14 +25879,14 @@ def __init__( self.schedule = schedule -class RecurrenceSchedule(msrest.serialization.Model): +class RecurrenceSchedule(_serialization.Model): """RecurrenceSchedule. All required parameters must be populated in order to send to Azure. - :ivar hours: Required. [Required] List of hours for the schedule. + :ivar hours: [Required] List of hours for the schedule. Required. :vartype hours: list[int] - :ivar minutes: Required. [Required] List of minutes for the schedule. + :ivar minutes: [Required] List of minutes for the schedule. Required. :vartype minutes: list[int] :ivar month_days: List of month days for the schedule. 
:vartype month_days: list[int] @@ -26063,15 +25895,15 @@ class RecurrenceSchedule(msrest.serialization.Model): """ _validation = { - 'hours': {'required': True}, - 'minutes': {'required': True}, + "hours": {"required": True}, + "minutes": {"required": True}, } _attribute_map = { - 'hours': {'key': 'hours', 'type': '[int]'}, - 'minutes': {'key': 'minutes', 'type': '[int]'}, - 'month_days': {'key': 'monthDays', 'type': '[int]'}, - 'week_days': {'key': 'weekDays', 'type': '[str]'}, + "hours": {"key": "hours", "type": "[int]"}, + "minutes": {"key": "minutes", "type": "[int]"}, + "month_days": {"key": "monthDays", "type": "[int]"}, + "week_days": {"key": "weekDays", "type": "[str]"}, } def __init__( @@ -26080,20 +25912,20 @@ def __init__( hours: List[int], minutes: List[int], month_days: Optional[List[int]] = None, - week_days: Optional[List[Union[str, "WeekDay"]]] = None, - **kwargs - ): + week_days: Optional[List[Union[str, "_models.WeekDay"]]] = None, + **kwargs: Any + ) -> None: """ - :keyword hours: Required. [Required] List of hours for the schedule. + :keyword hours: [Required] List of hours for the schedule. Required. :paramtype hours: list[int] - :keyword minutes: Required. [Required] List of minutes for the schedule. + :keyword minutes: [Required] List of minutes for the schedule. Required. :paramtype minutes: list[int] :keyword month_days: List of month days for the schedule. :paramtype month_days: list[int] :keyword week_days: List of days for the schedule. :paramtype week_days: list[str or ~azure.mgmt.machinelearningservices.models.WeekDay] """ - super(RecurrenceSchedule, self).__init__(**kwargs) + super().__init__(**kwargs) self.hours = hours self.minutes = minutes self.month_days = month_days @@ -26117,45 +25949,44 @@ class RecurrenceTrigger(TriggerBase): TimeZone should follow Windows time zone format. Refer: https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. :vartype time_zone: str - :ivar trigger_type: Required. [Required].Constant filled by server. Possible values include: - "Recurrence", "Cron". + :ivar trigger_type: [Required]. Required. Known values are: "Recurrence" and "Cron". :vartype trigger_type: str or ~azure.mgmt.machinelearningservices.models.TriggerType - :ivar frequency: Required. [Required] The frequency to trigger schedule. Possible values - include: "Minute", "Hour", "Day", "Week", "Month". + :ivar frequency: [Required] The frequency to trigger schedule. Required. Known values are: + "Minute", "Hour", "Day", "Week", and "Month". :vartype frequency: str or ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency - :ivar interval: Required. [Required] Specifies schedule interval in conjunction with frequency. + :ivar interval: [Required] Specifies schedule interval in conjunction with frequency. Required. :vartype interval: int :ivar schedule: The recurrence schedule. 
:vartype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule """ _validation = { - 'trigger_type': {'required': True}, - 'frequency': {'required': True}, - 'interval': {'required': True}, + "trigger_type": {"required": True}, + "frequency": {"required": True}, + "interval": {"required": True}, } _attribute_map = { - 'end_time': {'key': 'endTime', 'type': 'str'}, - 'start_time': {'key': 'startTime', 'type': 'str'}, - 'time_zone': {'key': 'timeZone', 'type': 'str'}, - 'trigger_type': {'key': 'triggerType', 'type': 'str'}, - 'frequency': {'key': 'frequency', 'type': 'str'}, - 'interval': {'key': 'interval', 'type': 'int'}, - 'schedule': {'key': 'schedule', 'type': 'RecurrenceSchedule'}, + "end_time": {"key": "endTime", "type": "str"}, + "start_time": {"key": "startTime", "type": "str"}, + "time_zone": {"key": "timeZone", "type": "str"}, + "trigger_type": {"key": "triggerType", "type": "str"}, + "frequency": {"key": "frequency", "type": "str"}, + "interval": {"key": "interval", "type": "int"}, + "schedule": {"key": "schedule", "type": "RecurrenceSchedule"}, } def __init__( self, *, - frequency: Union[str, "RecurrenceFrequency"], + frequency: Union[str, "_models.RecurrenceFrequency"], interval: int, end_time: Optional[str] = None, start_time: Optional[str] = None, - time_zone: Optional[str] = "UTC", - schedule: Optional["RecurrenceSchedule"] = None, - **kwargs - ): + time_zone: str = "UTC", + schedule: Optional["_models.RecurrenceSchedule"] = None, + **kwargs: Any + ) -> None: """ :keyword end_time: Specifies end time of schedule in ISO 8601, but without a UTC offset. Refer https://en.wikipedia.org/wiki/ISO_8601. @@ -26169,63 +26000,59 @@ def __init__( TimeZone should follow Windows time zone format. Refer: https://docs.microsoft.com/en-us/windows-hardware/manufacture/desktop/default-time-zones?view=windows-11. :paramtype time_zone: str - :keyword frequency: Required. [Required] The frequency to trigger schedule. Possible values - include: "Minute", "Hour", "Day", "Week", "Month". + :keyword frequency: [Required] The frequency to trigger schedule. Required. Known values are: + "Minute", "Hour", "Day", "Week", and "Month". :paramtype frequency: str or ~azure.mgmt.machinelearningservices.models.RecurrenceFrequency - :keyword interval: Required. [Required] Specifies schedule interval in conjunction with - frequency. + :keyword interval: [Required] Specifies schedule interval in conjunction with frequency. + Required. :paramtype interval: int :keyword schedule: The recurrence schedule. :paramtype schedule: ~azure.mgmt.machinelearningservices.models.RecurrenceSchedule """ - super(RecurrenceTrigger, self).__init__(end_time=end_time, start_time=start_time, time_zone=time_zone, **kwargs) - self.trigger_type = 'Recurrence' # type: str + super().__init__(end_time=end_time, start_time=start_time, time_zone=time_zone, **kwargs) + self.trigger_type: str = "Recurrence" self.frequency = frequency self.interval = interval self.schedule = schedule -class RegenerateEndpointKeysRequest(msrest.serialization.Model): +class RegenerateEndpointKeysRequest(_serialization.Model): """RegenerateEndpointKeysRequest. All required parameters must be populated in order to send to Azure. - :ivar key_type: Required. [Required] Specification for which type of key to generate. Primary - or Secondary. Possible values include: "Primary", "Secondary". + :ivar key_type: [Required] Specification for which type of key to generate. Primary or + Secondary. Required. Known values are: "Primary" and "Secondary". 
:vartype key_type: str or ~azure.mgmt.machinelearningservices.models.KeyType :ivar key_value: The value the key is set to. :vartype key_value: str """ _validation = { - 'key_type': {'required': True}, + "key_type": {"required": True}, } _attribute_map = { - 'key_type': {'key': 'keyType', 'type': 'str'}, - 'key_value': {'key': 'keyValue', 'type': 'str'}, + "key_type": {"key": "keyType", "type": "str"}, + "key_value": {"key": "keyValue", "type": "str"}, } def __init__( - self, - *, - key_type: Union[str, "KeyType"], - key_value: Optional[str] = None, - **kwargs - ): + self, *, key_type: Union[str, "_models.KeyType"], key_value: Optional[str] = None, **kwargs: Any + ) -> None: """ - :keyword key_type: Required. [Required] Specification for which type of key to generate. - Primary or Secondary. Possible values include: "Primary", "Secondary". + :keyword key_type: [Required] Specification for which type of key to generate. Primary or + Secondary. Required. Known values are: "Primary" and "Secondary". :paramtype key_type: str or ~azure.mgmt.machinelearningservices.models.KeyType :keyword key_value: The value the key is set to. :paramtype key_value: str """ - super(RegenerateEndpointKeysRequest, self).__init__(**kwargs) + super().__init__(**kwargs) self.key_type = key_type self.key_value = key_value -class Registry(TrackedResource): +class Registry(TrackedResource): # pylint: disable=too-many-instance-attributes """Registry. Variables are only populated by the server, and will be ignored when sending a request. @@ -26243,9 +26070,9 @@ class Registry(TrackedResource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. :vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. + :ivar location: The geo-location where the resource lives. Required. :vartype location: str :ivar identity: Managed service identity (system assigned and/or user assigned identities). 
:vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity @@ -26276,30 +26103,33 @@ class Registry(TrackedResource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'}, - 'intellectual_property_publisher': {'key': 'properties.intellectualPropertyPublisher', 'type': 'str'}, - 'managed_resource_group': {'key': 'properties.managedResourceGroup', 'type': 'ArmResourceId'}, - 'ml_flow_registry_uri': {'key': 'properties.mlFlowRegistryUri', 'type': 'str'}, - 'registry_private_endpoint_connections': {'key': 'properties.registryPrivateEndpointConnections', 'type': '[RegistryPrivateEndpointConnection]'}, - 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, - 'region_details': {'key': 'properties.regionDetails', 'type': '[RegistryRegionArmDetails]'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "sku": {"key": "sku", "type": "Sku"}, + "discovery_url": {"key": "properties.discoveryUrl", "type": "str"}, + "intellectual_property_publisher": {"key": "properties.intellectualPropertyPublisher", "type": "str"}, + "managed_resource_group": {"key": "properties.managedResourceGroup", "type": "ArmResourceId"}, + "ml_flow_registry_uri": {"key": "properties.mlFlowRegistryUri", "type": "str"}, + "registry_private_endpoint_connections": { + "key": "properties.registryPrivateEndpointConnections", + "type": "[RegistryPrivateEndpointConnection]", + }, + "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, + "region_details": {"key": "properties.regionDetails", "type": "[RegistryRegionArmDetails]"}, } def __init__( @@ -26307,22 +26137,22 @@ def __init__( *, location: str, tags: Optional[Dict[str, str]] = None, - identity: Optional["ManagedServiceIdentity"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, kind: Optional[str] = None, - sku: Optional["Sku"] = None, + sku: Optional["_models.Sku"] = None, discovery_url: Optional[str] = None, intellectual_property_publisher: Optional[str] = None, - managed_resource_group: Optional["ArmResourceId"] = None, + managed_resource_group: Optional["_models.ArmResourceId"] = None, ml_flow_registry_uri: Optional[str] = None, - registry_private_endpoint_connections: Optional[List["RegistryPrivateEndpointConnection"]] = None, + registry_private_endpoint_connections: Optional[List["_models.RegistryPrivateEndpointConnection"]] = None, 
public_network_access: Optional[str] = None, - region_details: Optional[List["RegistryRegionArmDetails"]] = None, - **kwargs - ): + region_details: Optional[List["_models.RegistryRegionArmDetails"]] = None, + **kwargs: Any + ) -> None: """ - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. + :keyword location: The geo-location where the resource lives. Required. :paramtype location: str :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity @@ -26351,7 +26181,7 @@ def __init__( :paramtype region_details: list[~azure.mgmt.machinelearningservices.models.RegistryRegionArmDetails] """ - super(Registry, self).__init__(tags=tags, location=location, **kwargs) + super().__init__(tags=tags, location=location, **kwargs) self.identity = identity self.kind = kind self.sku = sku @@ -26364,7 +26194,7 @@ def __init__( self.region_details = region_details -class RegistryListCredentialsResult(msrest.serialization.Model): +class RegistryListCredentialsResult(_serialization.Model): """RegistryListCredentialsResult. Variables are only populated by the server, and will be ignored when sending a request. @@ -26378,27 +26208,22 @@ class RegistryListCredentialsResult(msrest.serialization.Model): """ _validation = { - 'location': {'readonly': True}, - 'username': {'readonly': True}, + "location": {"readonly": True}, + "username": {"readonly": True}, } _attribute_map = { - 'location': {'key': 'location', 'type': 'str'}, - 'passwords': {'key': 'passwords', 'type': '[Password]'}, - 'username': {'key': 'username', 'type': 'str'}, + "location": {"key": "location", "type": "str"}, + "passwords": {"key": "passwords", "type": "[Password]"}, + "username": {"key": "username", "type": "str"}, } - def __init__( - self, - *, - passwords: Optional[List["Password"]] = None, - **kwargs - ): + def __init__(self, *, passwords: Optional[List["_models.Password"]] = None, **kwargs: Any) -> None: """ :keyword passwords: :paramtype passwords: list[~azure.mgmt.machinelearningservices.models.Password] """ - super(RegistryListCredentialsResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.location = None self.passwords = passwords self.username = None @@ -26417,9 +26242,9 @@ class RegistryPartialManagedServiceIdentity(ManagedServiceIdentity): :ivar tenant_id: The tenant ID of the system assigned identity. This property will only be provided for a system assigned identity. :vartype tenant_id: str - :ivar type: Required. Type of managed service identity (where both SystemAssigned and - UserAssigned types are allowed). Possible values include: "None", "SystemAssigned", - "UserAssigned", "SystemAssigned,UserAssigned". + :ivar type: Type of managed service identity (where both SystemAssigned and UserAssigned types + are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and + "SystemAssigned,UserAssigned". :vartype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType :ivar user_assigned_identities: The set of user assigned identities associated with the resource. 
The userAssignedIdentities dictionary keys will be ARM resource ids in the form: @@ -26430,29 +26255,29 @@ class RegistryPartialManagedServiceIdentity(ManagedServiceIdentity): """ _validation = { - 'principal_id': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'type': {'required': True}, + "principal_id": {"readonly": True}, + "tenant_id": {"readonly": True}, + "type": {"required": True}, } _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'user_assigned_identities': {'key': 'userAssignedIdentities', 'type': '{UserAssignedIdentity}'}, + "principal_id": {"key": "principalId", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "user_assigned_identities": {"key": "userAssignedIdentities", "type": "{UserAssignedIdentity}"}, } def __init__( self, *, - type: Union[str, "ManagedServiceIdentityType"], - user_assigned_identities: Optional[Dict[str, "UserAssignedIdentity"]] = None, - **kwargs - ): + type: Union[str, "_models.ManagedServiceIdentityType"], + user_assigned_identities: Optional[Dict[str, "_models.UserAssignedIdentity"]] = None, + **kwargs: Any + ) -> None: """ - :keyword type: Required. Type of managed service identity (where both SystemAssigned and - UserAssigned types are allowed). Possible values include: "None", "SystemAssigned", - "UserAssigned", "SystemAssigned,UserAssigned". + :keyword type: Type of managed service identity (where both SystemAssigned and UserAssigned + types are allowed). Required. Known values are: "None", "SystemAssigned", "UserAssigned", and + "SystemAssigned,UserAssigned". :paramtype type: str or ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentityType :keyword user_assigned_identities: The set of user assigned identities associated with the resource. The userAssignedIdentities dictionary keys will be ARM resource ids in the form: @@ -26461,10 +26286,10 @@ def __init__( :paramtype user_assigned_identities: dict[str, ~azure.mgmt.machinelearningservices.models.UserAssignedIdentity] """ - super(RegistryPartialManagedServiceIdentity, self).__init__(type=type, user_assigned_identities=user_assigned_identities, **kwargs) + super().__init__(type=type, user_assigned_identities=user_assigned_identities, **kwargs) -class RegistryPrivateEndpointConnection(msrest.serialization.Model): +class RegistryPrivateEndpointConnection(_serialization.Model): """Private endpoint connection definition. 
:ivar id: This is the private endpoint connection name created on SRP @@ -26486,25 +26311,30 @@ class RegistryPrivateEndpointConnection(msrest.serialization.Model): """ _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'group_ids': {'key': 'properties.groupIds', 'type': '[str]'}, - 'private_endpoint': {'key': 'properties.privateEndpoint', 'type': 'PrivateEndpointResource'}, - 'registry_private_link_service_connection_state': {'key': 'properties.registryPrivateLinkServiceConnectionState', 'type': 'RegistryPrivateLinkServiceConnectionState'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "location": {"key": "location", "type": "str"}, + "group_ids": {"key": "properties.groupIds", "type": "[str]"}, + "private_endpoint": {"key": "properties.privateEndpoint", "type": "PrivateEndpointResource"}, + "registry_private_link_service_connection_state": { + "key": "properties.registryPrivateLinkServiceConnectionState", + "type": "RegistryPrivateLinkServiceConnectionState", + }, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, } def __init__( self, *, - id: Optional[str] = None, + id: Optional[str] = None, # pylint: disable=redefined-builtin location: Optional[str] = None, group_ids: Optional[List[str]] = None, - private_endpoint: Optional["PrivateEndpointResource"] = None, - registry_private_link_service_connection_state: Optional["RegistryPrivateLinkServiceConnectionState"] = None, + private_endpoint: Optional["_models.PrivateEndpointResource"] = None, + registry_private_link_service_connection_state: Optional[ + "_models.RegistryPrivateLinkServiceConnectionState" + ] = None, provisioning_state: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword id: This is the private endpoint connection name created on SRP Full resource id: @@ -26523,7 +26353,7 @@ def __init__( approved, it's null. :paramtype provisioning_state: str """ - super(RegistryPrivateEndpointConnection, self).__init__(**kwargs) + super().__init__(**kwargs) self.id = id self.location = location self.group_ids = group_ids @@ -26532,7 +26362,7 @@ def __init__( self.provisioning_state = provisioning_state -class RegistryPrivateLinkServiceConnectionState(msrest.serialization.Model): +class RegistryPrivateLinkServiceConnectionState(_serialization.Model): """The connection state. :ivar actions_required: Some RP chose "None". Other RPs use this for region expansion. @@ -26540,16 +26370,16 @@ class RegistryPrivateLinkServiceConnectionState(msrest.serialization.Model): :ivar description: User-defined message that, per NRP doc, may be used for approval-related message. :vartype description: str - :ivar status: Connection status of the service consumer with the service provider. Possible - values include: "Approved", "Pending", "Rejected", "Disconnected", "Timeout". + :ivar status: Connection status of the service consumer with the service provider. Known values + are: "Approved", "Pending", "Rejected", "Disconnected", and "Timeout". 
:vartype status: str or ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ _attribute_map = { - 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + "actions_required": {"key": "actionsRequired", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "status": {"key": "status", "type": "str"}, } def __init__( @@ -26557,27 +26387,27 @@ def __init__( *, actions_required: Optional[str] = None, description: Optional[str] = None, - status: Optional[Union[str, "EndpointServiceConnectionStatus"]] = None, - **kwargs - ): + status: Optional[Union[str, "_models.EndpointServiceConnectionStatus"]] = None, + **kwargs: Any + ) -> None: """ :keyword actions_required: Some RP chose "None". Other RPs use this for region expansion. :paramtype actions_required: str :keyword description: User-defined message that, per NRP doc, may be used for approval-related message. :paramtype description: str - :keyword status: Connection status of the service consumer with the service provider. Possible - values include: "Approved", "Pending", "Rejected", "Disconnected", "Timeout". + :keyword status: Connection status of the service consumer with the service provider. Known + values are: "Approved", "Pending", "Rejected", "Disconnected", and "Timeout". :paramtype status: str or ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ - super(RegistryPrivateLinkServiceConnectionState, self).__init__(**kwargs) + super().__init__(**kwargs) self.actions_required = actions_required self.description = description self.status = status -class RegistryRegionArmDetails(msrest.serialization.Model): +class RegistryRegionArmDetails(_serialization.Model): """Details for each region the registry is in. :ivar acr_details: List of ACR accounts. @@ -26590,19 +26420,19 @@ class RegistryRegionArmDetails(msrest.serialization.Model): """ _attribute_map = { - 'acr_details': {'key': 'acrDetails', 'type': '[AcrDetails]'}, - 'location': {'key': 'location', 'type': 'str'}, - 'storage_account_details': {'key': 'storageAccountDetails', 'type': '[StorageAccountDetails]'}, + "acr_details": {"key": "acrDetails", "type": "[AcrDetails]"}, + "location": {"key": "location", "type": "str"}, + "storage_account_details": {"key": "storageAccountDetails", "type": "[StorageAccountDetails]"}, } def __init__( self, *, - acr_details: Optional[List["AcrDetails"]] = None, + acr_details: Optional[List["_models.AcrDetails"]] = None, location: Optional[str] = None, - storage_account_details: Optional[List["StorageAccountDetails"]] = None, - **kwargs - ): + storage_account_details: Optional[List["_models.StorageAccountDetails"]] = None, + **kwargs: Any + ) -> None: """ :keyword acr_details: List of ACR accounts. :paramtype acr_details: list[~azure.mgmt.machinelearningservices.models.AcrDetails] @@ -26612,13 +26442,13 @@ def __init__( :paramtype storage_account_details: list[~azure.mgmt.machinelearningservices.models.StorageAccountDetails] """ - super(RegistryRegionArmDetails, self).__init__(**kwargs) + super().__init__(**kwargs) self.acr_details = acr_details self.location = location self.storage_account_details = storage_account_details -class RegistryTrackedResourceArmPaginatedResult(msrest.serialization.Model): +class RegistryTrackedResourceArmPaginatedResult(_serialization.Model): """A paginated list of Registry entities. :ivar next_link: The link to the next page of Registry objects. 
If null, there are no @@ -26629,17 +26459,13 @@ class RegistryTrackedResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Registry]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Registry]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["Registry"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.Registry"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of Registry objects. If null, there are no additional pages. @@ -26647,16 +26473,29 @@ def __init__( :keyword value: An array of objects of type Registry. :paramtype value: list[~azure.mgmt.machinelearningservices.models.Registry] """ - super(RegistryTrackedResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class Regression(AutoMLVertical, TableVertical): +class Regression(TableVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes """Regression task in AutoML Table vertical. All required parameters must be populated in order to send to Azure. + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar cv_split_column_names: Columns to use for CVSplit data. :vartype cv_split_column_names: list[str] :ivar featurization_settings: Featurization inputs needed for AutoML job. @@ -26692,21 +26531,8 @@ class Regression(AutoMLVertical, TableVertical): :ivar weight_column_name: The name of the sample weight column. Automated ML supports a weighted column as an input, causing rows in the data to be weighted up or down. :vartype weight_column_name: str - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". 
- :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric for regression task. Possible values include: - "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", + :ivar primary_metric: Primary metric for regression task. Known values are: + "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", and "NormalizedMeanAbsoluteError". :vartype primary_metric: str or ~azure.mgmt.machinelearningservices.models.RegressionPrimaryMetrics @@ -26716,54 +26542,62 @@ class Regression(AutoMLVertical, TableVertical): """ _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'cv_split_column_names': {'key': 'cvSplitColumnNames', 'type': '[str]'}, - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'TableVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'TableFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'TableVerticalLimitSettings'}, - 'n_cross_validations': {'key': 'nCrossValidations', 'type': 'NCrossValidations'}, - 'search_space': {'key': 'searchSpace', 'type': '[TableParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'TableSweepSettings'}, - 'test_data': {'key': 'testData', 'type': 'MLTableJobInput'}, - 'test_data_size': {'key': 'testDataSize', 'type': 'float'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'validation_data_size': {'key': 'validationDataSize', 'type': 'float'}, - 'weight_column_name': {'key': 'weightColumnName', 'type': 'str'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, - 'training_settings': {'key': 'trainingSettings', 'type': 'RegressionTrainingSettings'}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "cv_split_column_names": {"key": "cvSplitColumnNames", "type": "[str]"}, + "featurization_settings": {"key": "featurizationSettings", "type": "TableVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "TableFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "TableVerticalLimitSettings"}, + "n_cross_validations": {"key": "nCrossValidations", "type": "NCrossValidations"}, + "search_space": {"key": "searchSpace", "type": "[TableParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "TableSweepSettings"}, + "test_data": {"key": "testData", "type": "MLTableJobInput"}, + "test_data_size": {"key": "testDataSize", "type": "float"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "validation_data_size": {"key": "validationDataSize", "type": "float"}, + "weight_column_name": {"key": "weightColumnName", "type": "str"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, + "training_settings": {"key": "trainingSettings", 
"type": "RegressionTrainingSettings"}, } def __init__( self, *, - training_data: "MLTableJobInput", + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, + target_column_name: Optional[str] = None, cv_split_column_names: Optional[List[str]] = None, - featurization_settings: Optional["TableVerticalFeaturizationSettings"] = None, - fixed_parameters: Optional["TableFixedParameters"] = None, - limit_settings: Optional["TableVerticalLimitSettings"] = None, - n_cross_validations: Optional["NCrossValidations"] = None, - search_space: Optional[List["TableParameterSubspace"]] = None, - sweep_settings: Optional["TableSweepSettings"] = None, - test_data: Optional["MLTableJobInput"] = None, + featurization_settings: Optional["_models.TableVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.TableFixedParameters"] = None, + limit_settings: Optional["_models.TableVerticalLimitSettings"] = None, + n_cross_validations: Optional["_models.NCrossValidations"] = None, + search_space: Optional[List["_models.TableParameterSubspace"]] = None, + sweep_settings: Optional["_models.TableSweepSettings"] = None, + test_data: Optional["_models.MLTableJobInput"] = None, test_data_size: Optional[float] = None, - validation_data: Optional["MLTableJobInput"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, validation_data_size: Optional[float] = None, weight_column_name: Optional[str] = None, - log_verbosity: Optional[Union[str, "LogVerbosity"]] = None, - target_column_name: Optional[str] = None, - primary_metric: Optional[Union[str, "RegressionPrimaryMetrics"]] = None, - training_settings: Optional["RegressionTrainingSettings"] = None, - **kwargs - ): + primary_metric: Optional[Union[str, "_models.RegressionPrimaryMetrics"]] = None, + training_settings: Optional["_models.RegressionTrainingSettings"] = None, + **kwargs: Any + ) -> None: """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :keyword cv_split_column_names: Columns to use for CVSplit data. :paramtype cv_split_column_names: list[str] :keyword featurization_settings: Featurization inputs needed for AutoML job. @@ -26802,16 +26636,8 @@ def __init__( :keyword weight_column_name: The name of the sample weight column. Automated ML supports a weighted column as an input, causing rows in the data to be weighted up or down. :paramtype weight_column_name: str - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. 
- :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword primary_metric: Primary metric for regression task. Possible values include: - "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", + :keyword primary_metric: Primary metric for regression task. Known values are: + "SpearmanCorrelation", "NormalizedRootMeanSquaredError", "R2Score", and "NormalizedMeanAbsoluteError". :paramtype primary_metric: str or ~azure.mgmt.machinelearningservices.models.RegressionPrimaryMetrics @@ -26819,7 +26645,30 @@ def __init__( :paramtype training_settings: ~azure.mgmt.machinelearningservices.models.RegressionTrainingSettings """ - super(Regression, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, cv_split_column_names=cv_split_column_names, featurization_settings=featurization_settings, fixed_parameters=fixed_parameters, limit_settings=limit_settings, n_cross_validations=n_cross_validations, search_space=search_space, sweep_settings=sweep_settings, test_data=test_data, test_data_size=test_data_size, validation_data=validation_data, validation_data_size=validation_data_size, weight_column_name=weight_column_name, **kwargs) + super().__init__( + cv_split_column_names=cv_split_column_names, + featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, + limit_settings=limit_settings, + n_cross_validations=n_cross_validations, + search_space=search_space, + sweep_settings=sweep_settings, + test_data=test_data, + test_data_size=test_data_size, + validation_data=validation_data, + validation_data_size=validation_data_size, + weight_column_name=weight_column_name, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type: str = "Regression" + self.training_data = training_data + self.primary_metric = primary_metric + self.training_settings = training_settings self.cv_split_column_names = cv_split_column_names self.featurization_settings = featurization_settings self.fixed_parameters = fixed_parameters @@ -26832,12 +26681,6 @@ def __init__( self.validation_data = validation_data self.validation_data_size = validation_data_size self.weight_column_name = weight_column_name - self.task_type = 'Regression' # type: str - self.primary_metric = primary_metric - self.training_settings = training_settings - self.log_verbosity = log_verbosity - self.target_column_name = target_column_name - self.training_data = training_data class RegressionModelPerformanceMetricThreshold(ModelPerformanceMetricThresholdBase): @@ -26845,47 +26688,47 @@ class RegressionModelPerformanceMetricThreshold(ModelPerformanceMetricThresholdB All required parameters must be populated in order to send to Azure. - :ivar model_type: Required. [Required] Specifies the data type of the metric threshold.Constant - filled by server. Possible values include: "Classification", "Regression". + :ivar model_type: [Required] Specifies the data type of the metric threshold. Required. Known + values are: "Classification" and "Regression". :vartype model_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringModelType :ivar threshold: The threshold value. If null, a default value will be set depending on the selected metric. :vartype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :ivar metric: Required. 
[Required] The regression model performance metric to calculate. - Possible values include: "MeanAbsoluteError", "RootMeanSquaredError", "MeanSquaredError". + :ivar metric: [Required] The regression model performance metric to calculate. Required. Known + values are: "MeanAbsoluteError", "RootMeanSquaredError", and "MeanSquaredError". :vartype metric: str or ~azure.mgmt.machinelearningservices.models.RegressionModelPerformanceMetric """ _validation = { - 'model_type': {'required': True}, - 'metric': {'required': True}, + "model_type": {"required": True}, + "metric": {"required": True}, } _attribute_map = { - 'model_type': {'key': 'modelType', 'type': 'str'}, - 'threshold': {'key': 'threshold', 'type': 'MonitoringThreshold'}, - 'metric': {'key': 'metric', 'type': 'str'}, + "model_type": {"key": "modelType", "type": "str"}, + "threshold": {"key": "threshold", "type": "MonitoringThreshold"}, + "metric": {"key": "metric", "type": "str"}, } def __init__( self, *, - metric: Union[str, "RegressionModelPerformanceMetric"], - threshold: Optional["MonitoringThreshold"] = None, - **kwargs - ): + metric: Union[str, "_models.RegressionModelPerformanceMetric"], + threshold: Optional["_models.MonitoringThreshold"] = None, + **kwargs: Any + ) -> None: """ :keyword threshold: The threshold value. If null, a default value will be set depending on the selected metric. :paramtype threshold: ~azure.mgmt.machinelearningservices.models.MonitoringThreshold - :keyword metric: Required. [Required] The regression model performance metric to calculate. - Possible values include: "MeanAbsoluteError", "RootMeanSquaredError", "MeanSquaredError". + :keyword metric: [Required] The regression model performance metric to calculate. Required. + Known values are: "MeanAbsoluteError", "RootMeanSquaredError", and "MeanSquaredError". :paramtype metric: str or ~azure.mgmt.machinelearningservices.models.RegressionModelPerformanceMetric """ - super(RegressionModelPerformanceMetricThreshold, self).__init__(threshold=threshold, **kwargs) - self.model_type = 'Regression' # type: str + super().__init__(threshold=threshold, **kwargs) + self.model_type: str = "Regression" self.metric = metric @@ -26914,8 +26757,8 @@ class RegressionTrainingSettings(TrainingSettings): mode selection. Default is 'auto'. If 'Distributed' then only distributed featurization is used and distributed algorithms are chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". + If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". :vartype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode :ivar allowed_training_algorithms: Allowed models for regression task. 
:vartype allowed_training_algorithms: list[str or @@ -26926,33 +26769,33 @@ class RegressionTrainingSettings(TrainingSettings): """ _attribute_map = { - 'enable_dnn_training': {'key': 'enableDnnTraining', 'type': 'bool'}, - 'enable_model_explainability': {'key': 'enableModelExplainability', 'type': 'bool'}, - 'enable_onnx_compatible_models': {'key': 'enableOnnxCompatibleModels', 'type': 'bool'}, - 'enable_stack_ensemble': {'key': 'enableStackEnsemble', 'type': 'bool'}, - 'enable_vote_ensemble': {'key': 'enableVoteEnsemble', 'type': 'bool'}, - 'ensemble_model_download_timeout': {'key': 'ensembleModelDownloadTimeout', 'type': 'duration'}, - 'stack_ensemble_settings': {'key': 'stackEnsembleSettings', 'type': 'StackEnsembleSettings'}, - 'training_mode': {'key': 'trainingMode', 'type': 'str'}, - 'allowed_training_algorithms': {'key': 'allowedTrainingAlgorithms', 'type': '[str]'}, - 'blocked_training_algorithms': {'key': 'blockedTrainingAlgorithms', 'type': '[str]'}, + "enable_dnn_training": {"key": "enableDnnTraining", "type": "bool"}, + "enable_model_explainability": {"key": "enableModelExplainability", "type": "bool"}, + "enable_onnx_compatible_models": {"key": "enableOnnxCompatibleModels", "type": "bool"}, + "enable_stack_ensemble": {"key": "enableStackEnsemble", "type": "bool"}, + "enable_vote_ensemble": {"key": "enableVoteEnsemble", "type": "bool"}, + "ensemble_model_download_timeout": {"key": "ensembleModelDownloadTimeout", "type": "duration"}, + "stack_ensemble_settings": {"key": "stackEnsembleSettings", "type": "StackEnsembleSettings"}, + "training_mode": {"key": "trainingMode", "type": "str"}, + "allowed_training_algorithms": {"key": "allowedTrainingAlgorithms", "type": "[str]"}, + "blocked_training_algorithms": {"key": "blockedTrainingAlgorithms", "type": "[str]"}, } def __init__( self, *, - enable_dnn_training: Optional[bool] = False, - enable_model_explainability: Optional[bool] = True, - enable_onnx_compatible_models: Optional[bool] = False, - enable_stack_ensemble: Optional[bool] = True, - enable_vote_ensemble: Optional[bool] = True, - ensemble_model_download_timeout: Optional[datetime.timedelta] = "PT5M", - stack_ensemble_settings: Optional["StackEnsembleSettings"] = None, - training_mode: Optional[Union[str, "TrainingMode"]] = None, - allowed_training_algorithms: Optional[List[Union[str, "RegressionModels"]]] = None, - blocked_training_algorithms: Optional[List[Union[str, "RegressionModels"]]] = None, - **kwargs - ): + enable_dnn_training: bool = False, + enable_model_explainability: bool = True, + enable_onnx_compatible_models: bool = False, + enable_stack_ensemble: bool = True, + enable_vote_ensemble: bool = True, + ensemble_model_download_timeout: datetime.timedelta = "PT5M", + stack_ensemble_settings: Optional["_models.StackEnsembleSettings"] = None, + training_mode: Optional[Union[str, "_models.TrainingMode"]] = None, + allowed_training_algorithms: Optional[List[Union[str, "_models.RegressionModels"]]] = None, + blocked_training_algorithms: Optional[List[Union[str, "_models.RegressionModels"]]] = None, + **kwargs: Any + ) -> None: """ :keyword enable_dnn_training: Enable recommendation of DNN models. :paramtype enable_dnn_training: bool @@ -26976,8 +26819,8 @@ def __init__( mode selection. Default is 'auto'. If 'Distributed' then only distributed featurization is used and distributed algorithms are chosen. - If 'NonDistributed' then only non distributed algorithms are chosen. Possible values include: - "Auto", "Distributed", "NonDistributed". 
+ If 'NonDistributed' then only non distributed algorithms are chosen. Known values are: "Auto", + "Distributed", and "NonDistributed". :paramtype training_mode: str or ~azure.mgmt.machinelearningservices.models.TrainingMode :keyword allowed_training_algorithms: Allowed models for regression task. :paramtype allowed_training_algorithms: list[str or @@ -26986,12 +26829,22 @@ def __init__( :paramtype blocked_training_algorithms: list[str or ~azure.mgmt.machinelearningservices.models.RegressionModels] """ - super(RegressionTrainingSettings, self).__init__(enable_dnn_training=enable_dnn_training, enable_model_explainability=enable_model_explainability, enable_onnx_compatible_models=enable_onnx_compatible_models, enable_stack_ensemble=enable_stack_ensemble, enable_vote_ensemble=enable_vote_ensemble, ensemble_model_download_timeout=ensemble_model_download_timeout, stack_ensemble_settings=stack_ensemble_settings, training_mode=training_mode, **kwargs) + super().__init__( + enable_dnn_training=enable_dnn_training, + enable_model_explainability=enable_model_explainability, + enable_onnx_compatible_models=enable_onnx_compatible_models, + enable_stack_ensemble=enable_stack_ensemble, + enable_vote_ensemble=enable_vote_ensemble, + ensemble_model_download_timeout=ensemble_model_download_timeout, + stack_ensemble_settings=stack_ensemble_settings, + training_mode=training_mode, + **kwargs + ) self.allowed_training_algorithms = allowed_training_algorithms self.blocked_training_algorithms = blocked_training_algorithms -class RequestLogging(msrest.serialization.Model): +class RequestLogging(_serialization.Model): """RequestLogging. :ivar capture_headers: For payload logging, we only collect payload by default. If customers @@ -27001,26 +26854,21 @@ class RequestLogging(msrest.serialization.Model): """ _attribute_map = { - 'capture_headers': {'key': 'captureHeaders', 'type': '[str]'}, + "capture_headers": {"key": "captureHeaders", "type": "[str]"}, } - def __init__( - self, - *, - capture_headers: Optional[List[str]] = None, - **kwargs - ): + def __init__(self, *, capture_headers: Optional[List[str]] = None, **kwargs: Any) -> None: """ :keyword capture_headers: For payload logging, we only collect payload by default. If customers also want to collect the specified headers, they can set them in captureHeaders so that backend will collect those headers along with payload. :paramtype capture_headers: list[str] """ - super(RequestLogging, self).__init__(**kwargs) + super().__init__(**kwargs) self.capture_headers = capture_headers -class ResizeSchema(msrest.serialization.Model): +class ResizeSchema(_serialization.Model): """Schema for Compute Instance resize. :ivar target_vm_size: The name of the virtual machine size. @@ -27028,55 +26876,45 @@ class ResizeSchema(msrest.serialization.Model): """ _attribute_map = { - 'target_vm_size': {'key': 'targetVMSize', 'type': 'str'}, + "target_vm_size": {"key": "targetVMSize", "type": "str"}, } - def __init__( - self, - *, - target_vm_size: Optional[str] = None, - **kwargs - ): + def __init__(self, *, target_vm_size: Optional[str] = None, **kwargs: Any) -> None: """ :keyword target_vm_size: The name of the virtual machine size. :paramtype target_vm_size: str """ - super(ResizeSchema, self).__init__(**kwargs) + super().__init__(**kwargs) self.target_vm_size = target_vm_size -class ResourceId(msrest.serialization.Model): +class ResourceId(_serialization.Model): """Represents a resource ID. For example, for a subnet, it is the resource URL for the subnet. 
All required parameters must be populated in order to send to Azure. - :ivar id: Required. The ID of the resource. + :ivar id: The ID of the resource. Required. :vartype id: str """ _validation = { - 'id': {'required': True}, + "id": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, } - def __init__( - self, - *, - id: str, - **kwargs - ): + def __init__(self, *, id: str, **kwargs: Any) -> None: # pylint: disable=redefined-builtin """ - :keyword id: Required. The ID of the resource. + :keyword id: The ID of the resource. Required. :paramtype id: str """ - super(ResourceId, self).__init__(**kwargs) + super().__init__(**kwargs) self.id = id -class ResourceName(msrest.serialization.Model): +class ResourceName(_serialization.Model): """The Resource Name. Variables are only populated by the server, and will be ignored when sending a request. @@ -27088,27 +26926,23 @@ class ResourceName(msrest.serialization.Model): """ _validation = { - 'value': {'readonly': True}, - 'localized_value': {'readonly': True}, + "value": {"readonly": True}, + "localized_value": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + "value": {"key": "value", "type": "str"}, + "localized_value": {"key": "localizedValue", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ResourceName, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.value = None self.localized_value = None -class ResourceQuota(msrest.serialization.Model): +class ResourceQuota(_serialization.Model): """The quota assigned to a resource. Variables are only populated by the server, and will be ignored when sending a request. @@ -27122,36 +26956,32 @@ class ResourceQuota(msrest.serialization.Model): :ivar name: Name of the resource. :vartype name: ~azure.mgmt.machinelearningservices.models.ResourceName :ivar limit: The maximum permitted quota of the resource. - :vartype limit: long - :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". + :vartype limit: int + :ivar unit: An enum describing the unit of quota measurement. 
"Count" :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit """ _validation = { - 'id': {'readonly': True}, - 'aml_workspace_location': {'readonly': True}, - 'type': {'readonly': True}, - 'name': {'readonly': True}, - 'limit': {'readonly': True}, - 'unit': {'readonly': True}, + "id": {"readonly": True}, + "aml_workspace_location": {"readonly": True}, + "type": {"readonly": True}, + "name": {"readonly": True}, + "limit": {"readonly": True}, + "unit": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'ResourceName'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "aml_workspace_location": {"key": "amlWorkspaceLocation", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "name": {"key": "name", "type": "ResourceName"}, + "limit": {"key": "limit", "type": "int"}, + "unit": {"key": "unit", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(ResourceQuota, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.id = None self.aml_workspace_location = None self.type = None @@ -27160,41 +26990,35 @@ def __init__( self.unit = None -class Route(msrest.serialization.Model): +class Route(_serialization.Model): """Route. All required parameters must be populated in order to send to Azure. - :ivar path: Required. [Required] The path for the route. + :ivar path: [Required] The path for the route. Required. :vartype path: str - :ivar port: Required. [Required] The port for the route. + :ivar port: [Required] The port for the route. Required. :vartype port: int """ _validation = { - 'path': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'port': {'required': True}, + "path": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "port": {"required": True}, } _attribute_map = { - 'path': {'key': 'path', 'type': 'str'}, - 'port': {'key': 'port', 'type': 'int'}, + "path": {"key": "path", "type": "str"}, + "port": {"key": "port", "type": "int"}, } - def __init__( - self, - *, - path: str, - port: int, - **kwargs - ): + def __init__(self, *, path: str, port: int, **kwargs: Any) -> None: """ - :keyword path: Required. [Required] The path for the route. + :keyword path: [Required] The path for the route. Required. :paramtype path: str - :keyword port: Required. [Required] The port for the route. + :keyword port: [Required] The port for the route. Required. :paramtype port: int """ - super(Route, self).__init__(**kwargs) + super().__init__(**kwargs) self.path = path self.port = port @@ -27202,21 +27026,28 @@ def __init__( class SASAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """SASAuthTypeWorkspaceConnectionProperties. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". + :ivar auth_type: Authentication type of the connection target. Required. 
Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", + :ivar category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar created_by_workspace_arm_id: The arm id of the workspace which created this connection. + :vartype created_by_workspace_arm_id: str :ivar expiry_time: :vartype expiry_time: ~datetime.datetime + :ivar is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :vartype is_shared_to_all: bool :ivar metadata: Any object. - :vartype metadata: any + :vartype metadata: JSON :ivar target: :vartype target: str :ivar credentials: @@ -27225,46 +27056,60 @@ class SASAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """ _validation = { - 'auth_type': {'required': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionSharedAccessSignature'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionSharedAccessSignature"}, } def __init__( self, *, - category: Optional[Union[str, "ConnectionCategory"]] = None, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, expiry_time: Optional[datetime.datetime] = None, - metadata: Optional[Any] = None, + is_shared_to_all: Optional[bool] = None, + metadata: Optional[JSON] = None, target: Optional[str] = None, - credentials: Optional["WorkspaceConnectionSharedAccessSignature"] = None, - **kwargs - ): + credentials: Optional["_models.WorkspaceConnectionSharedAccessSignature"] = None, + **kwargs: Any + ) -> None: """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", + :keyword category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". 
:paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory :keyword expiry_time: :paramtype expiry_time: ~datetime.datetime + :keyword is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :paramtype is_shared_to_all: bool :keyword metadata: Any object. - :paramtype metadata: any + :paramtype metadata: JSON :keyword target: :paramtype target: str :keyword credentials: :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionSharedAccessSignature """ - super(SASAuthTypeWorkspaceConnectionProperties, self).__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) - self.auth_type = 'SAS' # type: str + super().__init__( + category=category, + expiry_time=expiry_time, + is_shared_to_all=is_shared_to_all, + metadata=metadata, + target=target, + **kwargs + ) + self.auth_type: str = "SAS" self.credentials = credentials @@ -27273,8 +27118,8 @@ class SASCredentialDto(PendingUploadCredentialDto): All required parameters must be populated in order to send to Azure. - :ivar credential_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "SAS". + :ivar credential_type: [Required] Credential type used to authentication with storage. + Required. "SAS" :vartype credential_type: str or ~azure.mgmt.machinelearningservices.models.PendingUploadCredentialType :ivar sas_uri: Full SAS Uri, including the storage, container/blob path and SAS token. @@ -27282,26 +27127,21 @@ class SASCredentialDto(PendingUploadCredentialDto): """ _validation = { - 'credential_type': {'required': True}, + "credential_type": {"required": True}, } _attribute_map = { - 'credential_type': {'key': 'credentialType', 'type': 'str'}, - 'sas_uri': {'key': 'sasUri', 'type': 'str'}, + "credential_type": {"key": "credentialType", "type": "str"}, + "sas_uri": {"key": "sasUri", "type": "str"}, } - def __init__( - self, - *, - sas_uri: Optional[str] = None, - **kwargs - ): + def __init__(self, *, sas_uri: Optional[str] = None, **kwargs: Any) -> None: """ :keyword sas_uri: Full SAS Uri, including the storage, container/blob path and SAS token. :paramtype sas_uri: str """ - super(SASCredentialDto, self).__init__(**kwargs) - self.credential_type = 'SAS' # type: str + super().__init__(**kwargs) + self.credential_type: str = "SAS" self.sas_uri = sas_uri @@ -27310,36 +27150,31 @@ class SasDatastoreCredentials(DatastoreCredentials): All required parameters must be populated in order to send to Azure. - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType - :ivar secrets: Required. [Required] Storage container secrets. + :ivar secrets: [Required] Storage container secrets. Required. 
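# --- A minimal usage sketch for the regenerated SASAuthTypeWorkspaceConnectionProperties
# shown above. Assumptions: the models are importable from the
# azure.mgmt.machinelearningservices namespace referenced throughout these docstrings,
# and the target/metadata values are placeholders, not real endpoints.
import datetime
from azure.mgmt.machinelearningservices import models as _models

sas_connection = _models.SASAuthTypeWorkspaceConnectionProperties(
    category="ContainerRegistry",                    # one of the documented known values
    target="https://example.blob.core.windows.net",  # hypothetical target
    is_shared_to_all=False,                          # field added in this API version
    expiry_time=datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc),
    metadata={"purpose": "sample"},                  # free-form JSON object
)
# The discriminator is filled in by the constructor itself:
assert sas_connection.auth_type == "SAS"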
:vartype secrets: ~azure.mgmt.machinelearningservices.models.SasDatastoreSecrets """ _validation = { - 'credentials_type': {'required': True}, - 'secrets': {'required': True}, + "credentials_type": {"required": True}, + "secrets": {"required": True}, } _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'SasDatastoreSecrets'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, + "secrets": {"key": "secrets", "type": "SasDatastoreSecrets"}, } - def __init__( - self, - *, - secrets: "SasDatastoreSecrets", - **kwargs - ): + def __init__(self, *, secrets: "_models.SasDatastoreSecrets", **kwargs: Any) -> None: """ - :keyword secrets: Required. [Required] Storage container secrets. + :keyword secrets: [Required] Storage container secrets. Required. :paramtype secrets: ~azure.mgmt.machinelearningservices.models.SasDatastoreSecrets """ - super(SasDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'Sas' # type: str + super().__init__(**kwargs) + self.credentials_type: str = "Sas" self.secrets = secrets @@ -27348,44 +27183,39 @@ class SasDatastoreSecrets(DatastoreSecrets): All required parameters must be populated in order to send to Azure. - :ivar secrets_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", "Sas", - "ServicePrincipal", "KerberosPassword", "KerberosKeytab". + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType :ivar sas_token: Storage container SAS token. :vartype sas_token: str """ _validation = { - 'secrets_type': {'required': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'sas_token': {'key': 'sasToken', 'type': 'str'}, + "secrets_type": {"key": "secretsType", "type": "str"}, + "sas_token": {"key": "sasToken", "type": "str"}, } - def __init__( - self, - *, - sas_token: Optional[str] = None, - **kwargs - ): + def __init__(self, *, sas_token: Optional[str] = None, **kwargs: Any) -> None: """ :keyword sas_token: Storage container SAS token. :paramtype sas_token: str """ - super(SasDatastoreSecrets, self).__init__(**kwargs) - self.secrets_type = 'Sas' # type: str + super().__init__(**kwargs) + self.secrets_type: str = "Sas" self.sas_token = sas_token -class ScaleSettings(msrest.serialization.Model): +class ScaleSettings(_serialization.Model): """scale settings for AML Compute. All required parameters must be populated in order to send to Azure. - :ivar max_node_count: Required. Max number of nodes to use. + :ivar max_node_count: Max number of nodes to use. Required. :vartype max_node_count: int :ivar min_node_count: Min number of nodes to use. 
:vartype min_node_count: int @@ -27395,25 +27225,25 @@ class ScaleSettings(msrest.serialization.Model): """ _validation = { - 'max_node_count': {'required': True}, + "max_node_count": {"required": True}, } _attribute_map = { - 'max_node_count': {'key': 'maxNodeCount', 'type': 'int'}, - 'min_node_count': {'key': 'minNodeCount', 'type': 'int'}, - 'node_idle_time_before_scale_down': {'key': 'nodeIdleTimeBeforeScaleDown', 'type': 'duration'}, + "max_node_count": {"key": "maxNodeCount", "type": "int"}, + "min_node_count": {"key": "minNodeCount", "type": "int"}, + "node_idle_time_before_scale_down": {"key": "nodeIdleTimeBeforeScaleDown", "type": "duration"}, } def __init__( self, *, max_node_count: int, - min_node_count: Optional[int] = 0, + min_node_count: int = 0, node_idle_time_before_scale_down: Optional[datetime.timedelta] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword max_node_count: Required. Max number of nodes to use. + :keyword max_node_count: Max number of nodes to use. Required. :paramtype max_node_count: int :keyword min_node_count: Min number of nodes to use. :paramtype min_node_count: int @@ -27421,13 +27251,13 @@ def __init__( string needs to be in the RFC Format. :paramtype node_idle_time_before_scale_down: ~datetime.timedelta """ - super(ScaleSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.max_node_count = max_node_count self.min_node_count = min_node_count self.node_idle_time_before_scale_down = node_idle_time_before_scale_down -class ScaleSettingsInformation(msrest.serialization.Model): +class ScaleSettingsInformation(_serialization.Model): """Desired scale settings for the amlCompute. :ivar scale_settings: scale settings for AML Compute. @@ -27435,20 +27265,15 @@ class ScaleSettingsInformation(msrest.serialization.Model): """ _attribute_map = { - 'scale_settings': {'key': 'scaleSettings', 'type': 'ScaleSettings'}, + "scale_settings": {"key": "scaleSettings", "type": "ScaleSettings"}, } - def __init__( - self, - *, - scale_settings: Optional["ScaleSettings"] = None, - **kwargs - ): + def __init__(self, *, scale_settings: Optional["_models.ScaleSettings"] = None, **kwargs: Any) -> None: """ :keyword scale_settings: scale settings for AML Compute. :paramtype scale_settings: ~azure.mgmt.machinelearningservices.models.ScaleSettings """ - super(ScaleSettingsInformation, self).__init__(**kwargs) + super().__init__(**kwargs) self.scale_settings = scale_settings @@ -27470,80 +27295,74 @@ class Schedule(Resource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. 
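# --- A minimal usage sketch for the regenerated ScaleSettings / ScaleSettingsInformation
# models above; note that min_node_count is now a plain int defaulting to 0. The import
# path is an assumption based on the namespace used in these docstrings, and the node
# counts are placeholder values.
import datetime
from azure.mgmt.machinelearningservices import models as _models

scale = _models.ScaleSettings(
    max_node_count=4,                                                  # required
    min_node_count=0,                                                  # optional, defaults to 0
    node_idle_time_before_scale_down=datetime.timedelta(minutes=30),   # serialized as ISO-8601 duration
)
desired = _models.ScaleSettingsInformation(scale_settings=scale)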
:vartype properties: ~azure.mgmt.machinelearningservices.models.ScheduleProperties """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'ScheduleProperties'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "ScheduleProperties"}, } - def __init__( - self, - *, - properties: "ScheduleProperties", - **kwargs - ): + def __init__(self, *, properties: "_models.ScheduleProperties", **kwargs: Any) -> None: """ - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.ScheduleProperties """ - super(Schedule, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class ScheduleBase(msrest.serialization.Model): +class ScheduleBase(_serialization.Model): """ScheduleBase. :ivar id: A system assigned id for the schedule. :vartype id: str - :ivar provisioning_status: The current deployment state of schedule. Possible values include: - "Completed", "Provisioning", "Failed". + :ivar provisioning_status: The current deployment state of schedule. Known values are: + "Completed", "Provisioning", and "Failed". :vartype provisioning_status: str or ~azure.mgmt.machinelearningservices.models.ScheduleProvisioningState - :ivar status: Is the schedule enabled or disabled?. Possible values include: "Enabled", - "Disabled". + :ivar status: Is the schedule enabled or disabled?. Known values are: "Enabled" and "Disabled". :vartype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus """ _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'provisioning_status': {'key': 'provisioningStatus', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "provisioning_status": {"key": "provisioningStatus", "type": "str"}, + "status": {"key": "status", "type": "str"}, } def __init__( self, *, - id: Optional[str] = None, - provisioning_status: Optional[Union[str, "ScheduleProvisioningState"]] = None, - status: Optional[Union[str, "ScheduleStatus"]] = None, - **kwargs - ): + id: Optional[str] = None, # pylint: disable=redefined-builtin + provisioning_status: Optional[Union[str, "_models.ScheduleProvisioningState"]] = None, + status: Optional[Union[str, "_models.ScheduleStatus"]] = None, + **kwargs: Any + ) -> None: """ :keyword id: A system assigned id for the schedule. :paramtype id: str - :keyword provisioning_status: The current deployment state of schedule. Possible values - include: "Completed", "Provisioning", "Failed". + :keyword provisioning_status: The current deployment state of schedule. Known values are: + "Completed", "Provisioning", and "Failed". 
:paramtype provisioning_status: str or ~azure.mgmt.machinelearningservices.models.ScheduleProvisioningState - :keyword status: Is the schedule enabled or disabled?. Possible values include: "Enabled", + :keyword status: Is the schedule enabled or disabled?. Known values are: "Enabled" and "Disabled". :paramtype status: str or ~azure.mgmt.machinelearningservices.models.ScheduleStatus """ - super(ScheduleBase, self).__init__(**kwargs) + super().__init__(**kwargs) self.id = id self.provisioning_status = provisioning_status self.status = status @@ -27560,68 +27379,68 @@ class ScheduleProperties(ResourceBase): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] - :ivar action: Required. [Required] Specifies the action of the schedule. + :ivar action: [Required] Specifies the action of the schedule. Required. :vartype action: ~azure.mgmt.machinelearningservices.models.ScheduleActionBase :ivar display_name: Display name of schedule. :vartype display_name: str :ivar is_enabled: Is the schedule enabled?. :vartype is_enabled: bool - :ivar provisioning_state: Provisioning state for the schedule. Possible values include: - "Creating", "Updating", "Deleting", "Succeeded", "Failed", "Canceled". + :ivar provisioning_state: Provisioning state for the schedule. Known values are: "Creating", + "Updating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ScheduleProvisioningStatus - :ivar trigger: Required. [Required] Specifies the trigger details. + :ivar trigger: [Required] Specifies the trigger details. Required. 
:vartype trigger: ~azure.mgmt.machinelearningservices.models.TriggerBase """ _validation = { - 'action': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'trigger': {'required': True}, + "action": {"required": True}, + "provisioning_state": {"readonly": True}, + "trigger": {"required": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'action': {'key': 'action', 'type': 'ScheduleActionBase'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'is_enabled': {'key': 'isEnabled', 'type': 'bool'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'trigger': {'key': 'trigger', 'type': 'TriggerBase'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "action": {"key": "action", "type": "ScheduleActionBase"}, + "display_name": {"key": "displayName", "type": "str"}, + "is_enabled": {"key": "isEnabled", "type": "bool"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "trigger": {"key": "trigger", "type": "TriggerBase"}, } def __init__( self, *, - action: "ScheduleActionBase", - trigger: "TriggerBase", + action: "_models.ScheduleActionBase", + trigger: "_models.TriggerBase", description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, display_name: Optional[str] = None, - is_enabled: Optional[bool] = True, - **kwargs - ): + is_enabled: bool = True, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] - :keyword action: Required. [Required] Specifies the action of the schedule. + :keyword action: [Required] Specifies the action of the schedule. Required. :paramtype action: ~azure.mgmt.machinelearningservices.models.ScheduleActionBase :keyword display_name: Display name of schedule. :paramtype display_name: str :keyword is_enabled: Is the schedule enabled?. :paramtype is_enabled: bool - :keyword trigger: Required. [Required] Specifies the trigger details. + :keyword trigger: [Required] Specifies the trigger details. Required. :paramtype trigger: ~azure.mgmt.machinelearningservices.models.TriggerBase """ - super(ScheduleProperties, self).__init__(description=description, properties=properties, tags=tags, **kwargs) + super().__init__(description=description, properties=properties, tags=tags, **kwargs) self.action = action self.display_name = display_name self.is_enabled = is_enabled @@ -27629,7 +27448,7 @@ def __init__( self.trigger = trigger -class ScheduleResourceArmPaginatedResult(msrest.serialization.Model): +class ScheduleResourceArmPaginatedResult(_serialization.Model): """A paginated list of Schedule entities. :ivar next_link: The link to the next page of Schedule objects. 
If null, there are no @@ -27640,17 +27459,13 @@ class ScheduleResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Schedule]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Schedule]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["Schedule"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.Schedule"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of Schedule objects. If null, there are no additional pages. @@ -27658,12 +27473,12 @@ def __init__( :keyword value: An array of objects of type Schedule. :paramtype value: list[~azure.mgmt.machinelearningservices.models.Schedule] """ - super(ScheduleResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class ScriptReference(msrest.serialization.Model): +class ScriptReference(_serialization.Model): """Script reference. :ivar script_source: The storage source of the script: inline, workspace. @@ -27677,10 +27492,10 @@ class ScriptReference(msrest.serialization.Model): """ _attribute_map = { - 'script_source': {'key': 'scriptSource', 'type': 'str'}, - 'script_data': {'key': 'scriptData', 'type': 'str'}, - 'script_arguments': {'key': 'scriptArguments', 'type': 'str'}, - 'timeout': {'key': 'timeout', 'type': 'str'}, + "script_source": {"key": "scriptSource", "type": "str"}, + "script_data": {"key": "scriptData", "type": "str"}, + "script_arguments": {"key": "scriptArguments", "type": "str"}, + "timeout": {"key": "timeout", "type": "str"}, } def __init__( @@ -27690,8 +27505,8 @@ def __init__( script_data: Optional[str] = None, script_arguments: Optional[str] = None, timeout: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword script_source: The storage source of the script: inline, workspace. :paramtype script_source: str @@ -27702,14 +27517,14 @@ def __init__( :keyword timeout: Optional time period passed to timeout command. :paramtype timeout: str """ - super(ScriptReference, self).__init__(**kwargs) + super().__init__(**kwargs) self.script_source = script_source self.script_data = script_data self.script_arguments = script_arguments self.timeout = timeout -class ScriptsToExecute(msrest.serialization.Model): +class ScriptsToExecute(_serialization.Model): """Customized setup scripts. :ivar startup_script: Script that's run every time the machine starts. @@ -27719,29 +27534,29 @@ class ScriptsToExecute(msrest.serialization.Model): """ _attribute_map = { - 'startup_script': {'key': 'startupScript', 'type': 'ScriptReference'}, - 'creation_script': {'key': 'creationScript', 'type': 'ScriptReference'}, + "startup_script": {"key": "startupScript", "type": "ScriptReference"}, + "creation_script": {"key": "creationScript", "type": "ScriptReference"}, } def __init__( self, *, - startup_script: Optional["ScriptReference"] = None, - creation_script: Optional["ScriptReference"] = None, - **kwargs - ): + startup_script: Optional["_models.ScriptReference"] = None, + creation_script: Optional["_models.ScriptReference"] = None, + **kwargs: Any + ) -> None: """ :keyword startup_script: Script that's run every time the machine starts. 
:paramtype startup_script: ~azure.mgmt.machinelearningservices.models.ScriptReference :keyword creation_script: Script that's run only once during provision of the compute. :paramtype creation_script: ~azure.mgmt.machinelearningservices.models.ScriptReference """ - super(ScriptsToExecute, self).__init__(**kwargs) + super().__init__(**kwargs) self.startup_script = startup_script self.creation_script = creation_script -class SecretConfiguration(msrest.serialization.Model): +class SecretConfiguration(_serialization.Model): """Secret Configuration definition. :ivar uri: Secret Uri. @@ -27752,17 +27567,13 @@ class SecretConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'uri': {'key': 'uri', 'type': 'str'}, - 'workspace_secret_name': {'key': 'workspaceSecretName', 'type': 'str'}, + "uri": {"key": "uri", "type": "str"}, + "workspace_secret_name": {"key": "workspaceSecretName", "type": "str"}, } def __init__( - self, - *, - uri: Optional[str] = None, - workspace_secret_name: Optional[str] = None, - **kwargs - ): + self, *, uri: Optional[str] = None, workspace_secret_name: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword uri: Secret Uri. Sample Uri : https://myvault.vault.azure.net/secrets/mysecretname/secretversion. @@ -27770,11 +27581,48 @@ def __init__( :keyword workspace_secret_name: Name of secret in workspace key vault. :paramtype workspace_secret_name: str """ - super(SecretConfiguration, self).__init__(**kwargs) + super().__init__(**kwargs) self.uri = uri self.workspace_secret_name = workspace_secret_name +class ServerlessComputeSettings(_serialization.Model): + """ServerlessComputeSettings. + + :ivar serverless_compute_custom_subnet: The resource ID of an existing virtual network subnet + in which serverless compute nodes should be deployed. + :vartype serverless_compute_custom_subnet: str + :ivar serverless_compute_no_public_ip: The flag to signal if serverless compute nodes deployed + in custom vNet would have no public IP addresses for a workspace with private endpoint. + :vartype serverless_compute_no_public_ip: bool + """ + + _attribute_map = { + "serverless_compute_custom_subnet": {"key": "serverlessComputeCustomSubnet", "type": "str"}, + "serverless_compute_no_public_ip": {"key": "serverlessComputeNoPublicIP", "type": "bool"}, + } + + def __init__( + self, + *, + serverless_compute_custom_subnet: Optional[str] = None, + serverless_compute_no_public_ip: Optional[bool] = None, + **kwargs: Any + ) -> None: + """ + :keyword serverless_compute_custom_subnet: The resource ID of an existing virtual network + subnet in which serverless compute nodes should be deployed. + :paramtype serverless_compute_custom_subnet: str + :keyword serverless_compute_no_public_ip: The flag to signal if serverless compute nodes + deployed in custom vNet would have no public IP addresses for a workspace with private + endpoint. + :paramtype serverless_compute_no_public_ip: bool + """ + super().__init__(**kwargs) + self.serverless_compute_custom_subnet = serverless_compute_custom_subnet + self.serverless_compute_no_public_ip = serverless_compute_no_public_ip + + class ServerlessEndpoint(TrackedResource): """ServerlessEndpoint. @@ -27793,77 +27641,77 @@ class ServerlessEndpoint(TrackedResource): :ivar system_data: Azure Resource Manager metadata containing createdBy and modifiedBy information. :vartype system_data: ~azure.mgmt.machinelearningservices.models.SystemData - :ivar tags: A set of tags. Resource tags. + :ivar tags: Resource tags. 
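# --- A minimal usage sketch for ServerlessComputeSettings, a model newly introduced in
# this API version (see the class added above). The subnet resource ID is a hypothetical
# placeholder and the import path is an assumption.
from azure.mgmt.machinelearningservices import models as _models

serverless_settings = _models.ServerlessComputeSettings(
    serverless_compute_custom_subnet=(
        "/subscriptions/<sub-id>/resourceGroups/<rg>/providers/"
        "Microsoft.Network/virtualNetworks/<vnet>/subnets/<subnet>"
    ),
    serverless_compute_no_public_ip=True,  # serverless nodes get no public IP addresses
)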
:vartype tags: dict[str, str] - :ivar location: Required. The geo-location where the resource lives. + :ivar location: The geo-location where the resource lives. Required. :vartype location: str :ivar identity: Managed service identity (system assigned and/or user assigned identities). :vartype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity :ivar kind: Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type. :vartype kind: str - :ivar properties: Required. [Required] Additional attributes of the entity. + :ivar properties: [Required] Additional attributes of the entity. Required. :vartype properties: ~azure.mgmt.machinelearningservices.models.ServerlessEndpointProperties :ivar sku: Sku details required for ARM contract for Autoscaling. :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'location': {'required': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "location": {"required": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'location': {'key': 'location', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': 'ServerlessEndpointProperties'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "tags": {"key": "tags", "type": "{str}"}, + "location": {"key": "location", "type": "str"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "properties": {"key": "properties", "type": "ServerlessEndpointProperties"}, + "sku": {"key": "sku", "type": "Sku"}, } def __init__( self, *, location: str, - properties: "ServerlessEndpointProperties", + properties: "_models.ServerlessEndpointProperties", tags: Optional[Dict[str, str]] = None, - identity: Optional["ManagedServiceIdentity"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, kind: Optional[str] = None, - sku: Optional["Sku"] = None, - **kwargs - ): + sku: Optional["_models.Sku"] = None, + **kwargs: Any + ) -> None: """ - :keyword tags: A set of tags. Resource tags. + :keyword tags: Resource tags. :paramtype tags: dict[str, str] - :keyword location: Required. The geo-location where the resource lives. + :keyword location: The geo-location where the resource lives. Required. :paramtype location: str :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity :keyword kind: Metadata used by portal/tooling/etc to render different UX experiences for resources of the same type. :paramtype kind: str - :keyword properties: Required. [Required] Additional attributes of the entity. + :keyword properties: [Required] Additional attributes of the entity. Required. 
:paramtype properties: ~azure.mgmt.machinelearningservices.models.ServerlessEndpointProperties :keyword sku: Sku details required for ARM contract for Autoscaling. :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku """ - super(ServerlessEndpoint, self).__init__(tags=tags, location=location, **kwargs) + super().__init__(tags=tags, location=location, **kwargs) self.identity = identity self.kind = kind self.properties = properties self.sku = sku -class ServerlessEndpointProperties(msrest.serialization.Model): +class ServerlessEndpointProperties(_serialization.Model): """ServerlessEndpointProperties. Variables are only populated by the server, and will be ignored when sending a request. @@ -27873,45 +27721,40 @@ class ServerlessEndpointProperties(msrest.serialization.Model): :ivar inference_uri: The inference uri to target when making requests against the serverless endpoint. :vartype inference_uri: str - :ivar model_profile: Required. [Required] The model profile to configure the serverless - endpoint with. + :ivar model_profile: [Required] The model profile to configure the serverless endpoint with. + Required. :vartype model_profile: ~azure.mgmt.machinelearningservices.models.ModelProfile - :ivar provisioning_state: Provisioning state for the endpoint. Possible values include: - "Creating", "Deleting", "Succeeded", "Failed", "Updating", "Canceled". + :ivar provisioning_state: Provisioning state for the endpoint. Known values are: "Creating", + "Deleting", "Succeeded", "Failed", "Updating", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.EndpointProvisioningState """ _validation = { - 'inference_uri': {'readonly': True}, - 'model_profile': {'required': True}, - 'provisioning_state': {'readonly': True}, + "inference_uri": {"readonly": True}, + "model_profile": {"required": True}, + "provisioning_state": {"readonly": True}, } _attribute_map = { - 'inference_uri': {'key': 'inferenceUri', 'type': 'str'}, - 'model_profile': {'key': 'modelProfile', 'type': 'ModelProfile'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, + "inference_uri": {"key": "inferenceUri", "type": "str"}, + "model_profile": {"key": "modelProfile", "type": "ModelProfile"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, } - def __init__( - self, - *, - model_profile: "ModelProfile", - **kwargs - ): + def __init__(self, *, model_profile: "_models.ModelProfile", **kwargs: Any) -> None: """ - :keyword model_profile: Required. [Required] The model profile to configure the serverless - endpoint with. + :keyword model_profile: [Required] The model profile to configure the serverless endpoint with. + Required. :paramtype model_profile: ~azure.mgmt.machinelearningservices.models.ModelProfile """ - super(ServerlessEndpointProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.inference_uri = None self.model_profile = model_profile self.provisioning_state = None -class ServerlessEndpointTrackedResourceArmPaginatedResult(msrest.serialization.Model): +class ServerlessEndpointTrackedResourceArmPaginatedResult(_serialization.Model): """A paginated list of ServerlessEndpoint entities. :ivar next_link: The link to the next page of ServerlessEndpoint objects. 
If null, there are no @@ -27922,17 +27765,17 @@ class ServerlessEndpointTrackedResourceArmPaginatedResult(msrest.serialization.M """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[ServerlessEndpoint]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[ServerlessEndpoint]"}, } def __init__( self, *, next_link: Optional[str] = None, - value: Optional[List["ServerlessEndpoint"]] = None, - **kwargs - ): + value: Optional[List["_models.ServerlessEndpoint"]] = None, + **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of ServerlessEndpoint objects. If null, there are no additional pages. @@ -27940,12 +27783,12 @@ def __init__( :keyword value: An array of objects of type ServerlessEndpoint. :paramtype value: list[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] """ - super(ServerlessEndpointTrackedResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class ServiceManagedResourcesSettings(msrest.serialization.Model): +class ServiceManagedResourcesSettings(_serialization.Model): """ServiceManagedResourcesSettings. :ivar cosmos_db: @@ -27953,41 +27796,43 @@ class ServiceManagedResourcesSettings(msrest.serialization.Model): """ _attribute_map = { - 'cosmos_db': {'key': 'cosmosDb', 'type': 'CosmosDbSettings'}, + "cosmos_db": {"key": "cosmosDb", "type": "CosmosDbSettings"}, } - def __init__( - self, - *, - cosmos_db: Optional["CosmosDbSettings"] = None, - **kwargs - ): + def __init__(self, *, cosmos_db: Optional["_models.CosmosDbSettings"] = None, **kwargs: Any) -> None: """ :keyword cosmos_db: :paramtype cosmos_db: ~azure.mgmt.machinelearningservices.models.CosmosDbSettings """ - super(ServiceManagedResourcesSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.cosmos_db = cosmos_db class ServicePrincipalAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """ServicePrincipalAuthTypeWorkspaceConnectionProperties. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", + :ivar category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar created_by_workspace_arm_id: The arm id of the workspace which created this connection. 
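# --- A minimal sketch assembling a ServerlessEndpoint from the models defined above.
# ModelProfile is declared elsewhere in this file, so it is taken as an argument rather
# than constructed; the import path and the SKU name "Consumption" are assumptions.
from azure.mgmt.machinelearningservices import models as _models

def build_serverless_endpoint(location: str, model_profile: "_models.ModelProfile") -> "_models.ServerlessEndpoint":
    properties = _models.ServerlessEndpointProperties(model_profile=model_profile)  # [Required]
    return _models.ServerlessEndpoint(
        location=location,                    # required geo-location
        properties=properties,
        sku=_models.Sku(name="Consumption"),  # hypothetical SKU; only `name` is required
        tags={"env": "dev"},
    )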
+ :vartype created_by_workspace_arm_id: str :ivar expiry_time: :vartype expiry_time: ~datetime.datetime + :ivar is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :vartype is_shared_to_all: bool :ivar metadata: Any object. - :vartype metadata: any + :vartype metadata: JSON :ivar target: :vartype target: str :ivar credentials: @@ -27996,46 +27841,60 @@ class ServicePrincipalAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionP """ _validation = { - 'auth_type': {'required': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionServicePrincipal'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionServicePrincipal"}, } def __init__( self, *, - category: Optional[Union[str, "ConnectionCategory"]] = None, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, expiry_time: Optional[datetime.datetime] = None, - metadata: Optional[Any] = None, + is_shared_to_all: Optional[bool] = None, + metadata: Optional[JSON] = None, target: Optional[str] = None, - credentials: Optional["WorkspaceConnectionServicePrincipal"] = None, - **kwargs - ): + credentials: Optional["_models.WorkspaceConnectionServicePrincipal"] = None, + **kwargs: Any + ) -> None: """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", + :keyword category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory :keyword expiry_time: :paramtype expiry_time: ~datetime.datetime + :keyword is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :paramtype is_shared_to_all: bool :keyword metadata: Any object. 
- :paramtype metadata: any + :paramtype metadata: JSON :keyword target: :paramtype target: str :keyword credentials: :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionServicePrincipal """ - super(ServicePrincipalAuthTypeWorkspaceConnectionProperties, self).__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) - self.auth_type = 'ServicePrincipal' # type: str + super().__init__( + category=category, + expiry_time=expiry_time, + is_shared_to_all=is_shared_to_all, + metadata=metadata, + target=target, + **kwargs + ) + self.auth_type: str = "ServicePrincipal" self.credentials = credentials @@ -28044,63 +27903,63 @@ class ServicePrincipalDatastoreCredentials(DatastoreCredentials): All required parameters must be populated in order to send to Azure. - :ivar credentials_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", - "None", "Sas", "ServicePrincipal", "KerberosKeytab", "KerberosPassword". + :ivar credentials_type: [Required] Credential type used to authentication with storage. + Required. Known values are: "AccountKey", "Certificate", "None", "Sas", "ServicePrincipal", + "KerberosKeytab", and "KerberosPassword". :vartype credentials_type: str or ~azure.mgmt.machinelearningservices.models.CredentialsType :ivar authority_url: Authority URL used for authentication. :vartype authority_url: str - :ivar client_id: Required. [Required] Service principal client ID. + :ivar client_id: [Required] Service principal client ID. Required. :vartype client_id: str :ivar resource_url: Resource the service principal has access to. :vartype resource_url: str - :ivar secrets: Required. [Required] Service principal secrets. + :ivar secrets: [Required] Service principal secrets. Required. :vartype secrets: ~azure.mgmt.machinelearningservices.models.ServicePrincipalDatastoreSecrets - :ivar tenant_id: Required. [Required] ID of the tenant to which the service principal belongs. + :ivar tenant_id: [Required] ID of the tenant to which the service principal belongs. Required. 
:vartype tenant_id: str """ _validation = { - 'credentials_type': {'required': True}, - 'client_id': {'required': True}, - 'secrets': {'required': True}, - 'tenant_id': {'required': True}, + "credentials_type": {"required": True}, + "client_id": {"required": True}, + "secrets": {"required": True}, + "tenant_id": {"required": True}, } _attribute_map = { - 'credentials_type': {'key': 'credentialsType', 'type': 'str'}, - 'authority_url': {'key': 'authorityUrl', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'resource_url': {'key': 'resourceUrl', 'type': 'str'}, - 'secrets': {'key': 'secrets', 'type': 'ServicePrincipalDatastoreSecrets'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + "credentials_type": {"key": "credentialsType", "type": "str"}, + "authority_url": {"key": "authorityUrl", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, + "resource_url": {"key": "resourceUrl", "type": "str"}, + "secrets": {"key": "secrets", "type": "ServicePrincipalDatastoreSecrets"}, + "tenant_id": {"key": "tenantId", "type": "str"}, } def __init__( self, *, client_id: str, - secrets: "ServicePrincipalDatastoreSecrets", + secrets: "_models.ServicePrincipalDatastoreSecrets", tenant_id: str, authority_url: Optional[str] = None, resource_url: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword authority_url: Authority URL used for authentication. :paramtype authority_url: str - :keyword client_id: Required. [Required] Service principal client ID. + :keyword client_id: [Required] Service principal client ID. Required. :paramtype client_id: str :keyword resource_url: Resource the service principal has access to. :paramtype resource_url: str - :keyword secrets: Required. [Required] Service principal secrets. + :keyword secrets: [Required] Service principal secrets. Required. :paramtype secrets: ~azure.mgmt.machinelearningservices.models.ServicePrincipalDatastoreSecrets - :keyword tenant_id: Required. [Required] ID of the tenant to which the service principal - belongs. + :keyword tenant_id: [Required] ID of the tenant to which the service principal belongs. + Required. :paramtype tenant_id: str """ - super(ServicePrincipalDatastoreCredentials, self).__init__(**kwargs) - self.credentials_type = 'ServicePrincipal' # type: str + super().__init__(**kwargs) + self.credentials_type: str = "ServicePrincipal" self.authority_url = authority_url self.client_id = client_id self.resource_url = resource_url @@ -28113,44 +27972,40 @@ class ServicePrincipalDatastoreSecrets(DatastoreSecrets): All required parameters must be populated in order to send to Azure. - :ivar secrets_type: Required. [Required] Credential type used to authentication with - storage.Constant filled by server. Possible values include: "AccountKey", "Certificate", "Sas", - "ServicePrincipal", "KerberosPassword", "KerberosKeytab". + :ivar secrets_type: [Required] Credential type used to authentication with storage. Required. + Known values are: "AccountKey", "Certificate", "Sas", "ServicePrincipal", "KerberosPassword", + and "KerberosKeytab". :vartype secrets_type: str or ~azure.mgmt.machinelearningservices.models.SecretsType :ivar client_secret: Service principal secret. 
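# --- A minimal usage sketch wiring ServicePrincipalDatastoreCredentials to its secrets
# model, both shown above. The GUIDs and the client secret are placeholders; the import
# path is an assumption.
from azure.mgmt.machinelearningservices import models as _models

sp_credentials = _models.ServicePrincipalDatastoreCredentials(
    client_id="00000000-0000-0000-0000-000000000000",   # [Required] service principal client ID
    tenant_id="00000000-0000-0000-0000-000000000000",   # [Required] tenant of the service principal
    secrets=_models.ServicePrincipalDatastoreSecrets(client_secret="<client-secret>"),
)
# The discriminator is filled in by the constructor:
assert sp_credentials.credentials_type == "ServicePrincipal"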
:vartype client_secret: str """ _validation = { - 'secrets_type': {'required': True}, + "secrets_type": {"required": True}, } _attribute_map = { - 'secrets_type': {'key': 'secretsType', 'type': 'str'}, - 'client_secret': {'key': 'clientSecret', 'type': 'str'}, + "secrets_type": {"key": "secretsType", "type": "str"}, + "client_secret": {"key": "clientSecret", "type": "str"}, } - def __init__( - self, - *, - client_secret: Optional[str] = None, - **kwargs - ): + def __init__(self, *, client_secret: Optional[str] = None, **kwargs: Any) -> None: """ :keyword client_secret: Service principal secret. :paramtype client_secret: str """ - super(ServicePrincipalDatastoreSecrets, self).__init__(**kwargs) - self.secrets_type = 'ServicePrincipal' # type: str + super().__init__(**kwargs) + self.secrets_type: str = "ServicePrincipal" self.client_secret = client_secret -class ServiceTagDestination(msrest.serialization.Model): - """Service Tag destination for a Service Tag Outbound Rule for the managed network of a machine learning workspace. +class ServiceTagDestination(_serialization.Model): + """Service Tag destination for a Service Tag Outbound Rule for the managed network of a machine + learning workspace. Variables are only populated by the server, and will be ignored when sending a request. - :ivar action: The action enum for networking rule. Possible values include: "Allow", "Deny". + :ivar action: The action enum for networking rule. Known values are: "Allow" and "Deny". :vartype action: str or ~azure.mgmt.machinelearningservices.models.RuleAction :ivar address_prefixes: Optional, if provided, the ServiceTag property will be ignored. :vartype address_prefixes: list[str] @@ -28163,28 +28018,28 @@ class ServiceTagDestination(msrest.serialization.Model): """ _validation = { - 'address_prefixes': {'readonly': True}, + "address_prefixes": {"readonly": True}, } _attribute_map = { - 'action': {'key': 'action', 'type': 'str'}, - 'address_prefixes': {'key': 'addressPrefixes', 'type': '[str]'}, - 'port_ranges': {'key': 'portRanges', 'type': 'str'}, - 'protocol': {'key': 'protocol', 'type': 'str'}, - 'service_tag': {'key': 'serviceTag', 'type': 'str'}, + "action": {"key": "action", "type": "str"}, + "address_prefixes": {"key": "addressPrefixes", "type": "[str]"}, + "port_ranges": {"key": "portRanges", "type": "str"}, + "protocol": {"key": "protocol", "type": "str"}, + "service_tag": {"key": "serviceTag", "type": "str"}, } def __init__( self, *, - action: Optional[Union[str, "RuleAction"]] = None, + action: Optional[Union[str, "_models.RuleAction"]] = None, port_ranges: Optional[str] = None, protocol: Optional[str] = None, service_tag: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword action: The action enum for networking rule. Possible values include: "Allow", "Deny". + :keyword action: The action enum for networking rule. Known values are: "Allow" and "Deny". :paramtype action: str or ~azure.mgmt.machinelearningservices.models.RuleAction :keyword port_ranges: :paramtype port_ranges: str @@ -28193,7 +28048,7 @@ def __init__( :keyword service_tag: :paramtype service_tag: str """ - super(ServiceTagDestination, self).__init__(**kwargs) + super().__init__(**kwargs) self.action = action self.address_prefixes = None self.port_ranges = port_ranges @@ -28206,15 +28061,14 @@ class ServiceTagOutboundRule(OutboundRule): All required parameters must be populated in order to send to Azure. - :ivar category: Category of a managed network Outbound Rule of a machine learning workspace. 
- Possible values include: "Required", "Recommended", "UserDefined". + :ivar category: Category of a managed network outbound rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". :vartype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :ivar status: Type of a managed network Outbound Rule of a machine learning workspace. Possible - values include: "Inactive", "Active". + :ivar status: Type of a managed network outbound rule of a machine learning workspace. Known + values are: "Inactive" and "Active". :vartype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus - :ivar type: Required. Type of a managed network Outbound Rule of a machine learning - workspace.Constant filled by server. Possible values include: "FQDN", "PrivateEndpoint", - "ServiceTag". + :ivar type: Type of a managed network outbound rule of a machine learning workspace. Required. + Known values are: "FQDN", "PrivateEndpoint", and "ServiceTag". :vartype type: str or ~azure.mgmt.machinelearningservices.models.RuleType :ivar destination: Service Tag destination for a Service Tag Outbound Rule for the managed network of a machine learning workspace. @@ -28222,41 +28076,41 @@ class ServiceTagOutboundRule(OutboundRule): """ _validation = { - 'type': {'required': True}, + "type": {"required": True}, } _attribute_map = { - 'category': {'key': 'category', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'destination': {'key': 'destination', 'type': 'ServiceTagDestination'}, + "category": {"key": "category", "type": "str"}, + "status": {"key": "status", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "destination": {"key": "destination", "type": "ServiceTagDestination"}, } def __init__( self, *, - category: Optional[Union[str, "RuleCategory"]] = None, - status: Optional[Union[str, "RuleStatus"]] = None, - destination: Optional["ServiceTagDestination"] = None, - **kwargs - ): + category: Optional[Union[str, "_models.RuleCategory"]] = None, + status: Optional[Union[str, "_models.RuleStatus"]] = None, + destination: Optional["_models.ServiceTagDestination"] = None, + **kwargs: Any + ) -> None: """ - :keyword category: Category of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Required", "Recommended", "UserDefined". + :keyword category: Category of a managed network outbound rule of a machine learning workspace. + Known values are: "Required", "Recommended", and "UserDefined". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.RuleCategory - :keyword status: Type of a managed network Outbound Rule of a machine learning workspace. - Possible values include: "Inactive", "Active". + :keyword status: Type of a managed network outbound rule of a machine learning workspace. Known + values are: "Inactive" and "Active". :paramtype status: str or ~azure.mgmt.machinelearningservices.models.RuleStatus :keyword destination: Service Tag destination for a Service Tag Outbound Rule for the managed network of a machine learning workspace. 
:paramtype destination: ~azure.mgmt.machinelearningservices.models.ServiceTagDestination """ - super(ServiceTagOutboundRule, self).__init__(category=category, status=status, **kwargs) - self.type = 'ServiceTag' # type: str + super().__init__(category=category, status=status, **kwargs) + self.type: str = "ServiceTag" self.destination = destination -class SetupScripts(msrest.serialization.Model): +class SetupScripts(_serialization.Model): """Details of customized scripts to execute for setting up the cluster. :ivar scripts: Customized setup scripts. @@ -28264,24 +28118,19 @@ class SetupScripts(msrest.serialization.Model): """ _attribute_map = { - 'scripts': {'key': 'scripts', 'type': 'ScriptsToExecute'}, + "scripts": {"key": "scripts", "type": "ScriptsToExecute"}, } - def __init__( - self, - *, - scripts: Optional["ScriptsToExecute"] = None, - **kwargs - ): + def __init__(self, *, scripts: Optional["_models.ScriptsToExecute"] = None, **kwargs: Any) -> None: """ :keyword scripts: Customized setup scripts. :paramtype scripts: ~azure.mgmt.machinelearningservices.models.ScriptsToExecute """ - super(SetupScripts, self).__init__(**kwargs) + super().__init__(**kwargs) self.scripts = scripts -class SharedPrivateLinkResource(msrest.serialization.Model): +class SharedPrivateLinkResource(_serialization.Model): """SharedPrivateLinkResource. :ivar name: Unique name of the private link. @@ -28292,18 +28141,18 @@ class SharedPrivateLinkResource(msrest.serialization.Model): :vartype private_link_resource_id: str :ivar request_message: Request message. :vartype request_message: str - :ivar status: Connection status of the service consumer with the service provider. Possible - values include: "Approved", "Pending", "Rejected", "Disconnected", "Timeout". + :ivar status: Connection status of the service consumer with the service provider. Known values + are: "Approved", "Pending", "Rejected", "Disconnected", and "Timeout". :vartype status: str or ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'group_id': {'key': 'properties.groupId', 'type': 'str'}, - 'private_link_resource_id': {'key': 'properties.privateLinkResourceId', 'type': 'str'}, - 'request_message': {'key': 'properties.requestMessage', 'type': 'str'}, - 'status': {'key': 'properties.status', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "group_id": {"key": "properties.groupId", "type": "str"}, + "private_link_resource_id": {"key": "properties.privateLinkResourceId", "type": "str"}, + "request_message": {"key": "properties.requestMessage", "type": "str"}, + "status": {"key": "properties.status", "type": "str"}, } def __init__( @@ -28313,9 +28162,9 @@ def __init__( group_id: Optional[str] = None, private_link_resource_id: Optional[str] = None, request_message: Optional[str] = None, - status: Optional[Union[str, "EndpointServiceConnectionStatus"]] = None, - **kwargs - ): + status: Optional[Union[str, "_models.EndpointServiceConnectionStatus"]] = None, + **kwargs: Any + ) -> None: """ :keyword name: Unique name of the private link. :paramtype name: str @@ -28325,12 +28174,12 @@ def __init__( :paramtype private_link_resource_id: str :keyword request_message: Request message. :paramtype request_message: str - :keyword status: Connection status of the service consumer with the service provider. Possible - values include: "Approved", "Pending", "Rejected", "Disconnected", "Timeout". 
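# --- A minimal usage sketch for a user-defined service-tag outbound rule on a managed
# network, built from ServiceTagDestination and ServiceTagOutboundRule above. The chosen
# service tag, ports, and import path are assumptions for illustration only.
from azure.mgmt.machinelearningservices import models as _models

destination = _models.ServiceTagDestination(
    action="Allow",              # RuleAction known values: "Allow" / "Deny"
    service_tag="AzureMonitor",  # hypothetical service tag
    protocol="TCP",
    port_ranges="443",
)
outbound_rule = _models.ServiceTagOutboundRule(
    category="UserDefined",      # RuleCategory known values include "UserDefined"
    destination=destination,
)
assert outbound_rule.type == "ServiceTag"  # discriminator set by the constructor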
+ :keyword status: Connection status of the service consumer with the service provider. Known + values are: "Approved", "Pending", "Rejected", "Disconnected", and "Timeout". :paramtype status: str or ~azure.mgmt.machinelearningservices.models.EndpointServiceConnectionStatus """ - super(SharedPrivateLinkResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.group_id = group_id self.private_link_resource_id = private_link_resource_id @@ -28338,16 +28187,16 @@ def __init__( self.status = status -class Sku(msrest.serialization.Model): +class Sku(_serialization.Model): """The resource model definition representing SKU. All required parameters must be populated in order to send to Azure. - :ivar name: Required. The name of the SKU. Ex - P3. It is typically a letter+number code. + :ivar name: The name of the SKU. Ex - P3. It is typically a letter+number code. Required. :vartype name: str :ivar tier: This field is required to be implemented by the Resource Provider if the service - has more than one tier, but is not required on a PUT. Possible values include: "Free", "Basic", - "Standard", "Premium". + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", and "Premium". :vartype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier :ivar size: The SKU size. When the name field is the combination of tier and some other value, this would be the standalone code. @@ -28361,33 +28210,33 @@ class Sku(msrest.serialization.Model): """ _validation = { - 'name': {'required': True}, + "name": {"required": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, - 'size': {'key': 'size', 'type': 'str'}, - 'family': {'key': 'family', 'type': 'str'}, - 'capacity': {'key': 'capacity', 'type': 'int'}, + "name": {"key": "name", "type": "str"}, + "tier": {"key": "tier", "type": "str"}, + "size": {"key": "size", "type": "str"}, + "family": {"key": "family", "type": "str"}, + "capacity": {"key": "capacity", "type": "int"}, } def __init__( self, *, name: str, - tier: Optional[Union[str, "SkuTier"]] = None, + tier: Optional[Union[str, "_models.SkuTier"]] = None, size: Optional[str] = None, family: Optional[str] = None, capacity: Optional[int] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword name: Required. The name of the SKU. Ex - P3. It is typically a letter+number code. + :keyword name: The name of the SKU. Ex - P3. It is typically a letter+number code. Required. :paramtype name: str :keyword tier: This field is required to be implemented by the Resource Provider if the service - has more than one tier, but is not required on a PUT. Possible values include: "Free", "Basic", - "Standard", "Premium". + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", and "Premium". :paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier :keyword size: The SKU size. When the name field is the combination of tier and some other value, this would be the standalone code. @@ -28399,7 +28248,7 @@ def __init__( included. If scale out/in is not possible for the resource this may be omitted. 
:paramtype capacity: int """ - super(Sku, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.tier = tier self.size = size @@ -28407,7 +28256,7 @@ def __init__( self.capacity = capacity -class SkuCapacity(msrest.serialization.Model): +class SkuCapacity(_serialization.Model): """SKU capacity information. :ivar default: Gets or sets the default capacity. @@ -28416,27 +28265,27 @@ class SkuCapacity(msrest.serialization.Model): :vartype maximum: int :ivar minimum: Gets or sets the minimum. :vartype minimum: int - :ivar scale_type: Gets or sets the type of the scale. Possible values include: "Automatic", - "Manual", "None". + :ivar scale_type: Gets or sets the type of the scale. Known values are: "Automatic", "Manual", + and "None". :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.SkuScaleType """ _attribute_map = { - 'default': {'key': 'default', 'type': 'int'}, - 'maximum': {'key': 'maximum', 'type': 'int'}, - 'minimum': {'key': 'minimum', 'type': 'int'}, - 'scale_type': {'key': 'scaleType', 'type': 'str'}, + "default": {"key": "default", "type": "int"}, + "maximum": {"key": "maximum", "type": "int"}, + "minimum": {"key": "minimum", "type": "int"}, + "scale_type": {"key": "scaleType", "type": "str"}, } def __init__( self, *, - default: Optional[int] = 0, - maximum: Optional[int] = 0, - minimum: Optional[int] = 0, - scale_type: Optional[Union[str, "SkuScaleType"]] = None, - **kwargs - ): + default: int = 0, + maximum: int = 0, + minimum: int = 0, + scale_type: Optional[Union[str, "_models.SkuScaleType"]] = None, + **kwargs: Any + ) -> None: """ :keyword default: Gets or sets the default capacity. :paramtype default: int @@ -28444,18 +28293,18 @@ def __init__( :paramtype maximum: int :keyword minimum: Gets or sets the minimum. :paramtype minimum: int - :keyword scale_type: Gets or sets the type of the scale. Possible values include: "Automatic", - "Manual", "None". + :keyword scale_type: Gets or sets the type of the scale. Known values are: "Automatic", + "Manual", and "None". :paramtype scale_type: str or ~azure.mgmt.machinelearningservices.models.SkuScaleType """ - super(SkuCapacity, self).__init__(**kwargs) + super().__init__(**kwargs) self.default = default self.maximum = maximum self.minimum = minimum self.scale_type = scale_type -class SkuResource(msrest.serialization.Model): +class SkuResource(_serialization.Model): """Fulfills ARM Contract requirement to list all available SKUS for a resource. Variables are only populated by the server, and will be ignored when sending a request. @@ -28469,35 +28318,35 @@ class SkuResource(msrest.serialization.Model): """ _validation = { - 'resource_type': {'readonly': True}, + "resource_type": {"readonly": True}, } _attribute_map = { - 'capacity': {'key': 'capacity', 'type': 'SkuCapacity'}, - 'resource_type': {'key': 'resourceType', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'SkuSetting'}, + "capacity": {"key": "capacity", "type": "SkuCapacity"}, + "resource_type": {"key": "resourceType", "type": "str"}, + "sku": {"key": "sku", "type": "SkuSetting"}, } def __init__( self, *, - capacity: Optional["SkuCapacity"] = None, - sku: Optional["SkuSetting"] = None, - **kwargs - ): + capacity: Optional["_models.SkuCapacity"] = None, + sku: Optional["_models.SkuSetting"] = None, + **kwargs: Any + ) -> None: """ :keyword capacity: Gets or sets the Sku Capacity. :paramtype capacity: ~azure.mgmt.machinelearningservices.models.SkuCapacity :keyword sku: Gets or sets the Sku. 
:paramtype sku: ~azure.mgmt.machinelearningservices.models.SkuSetting """ - super(SkuResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.capacity = capacity self.resource_type = None self.sku = sku -class SkuResourceArmPaginatedResult(msrest.serialization.Model): +class SkuResourceArmPaginatedResult(_serialization.Model): """A paginated list of SkuResource entities. :ivar next_link: The link to the next page of SkuResource objects. If null, there are no @@ -28508,17 +28357,13 @@ class SkuResourceArmPaginatedResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[SkuResource]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[SkuResource]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["SkuResource"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.SkuResource"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page of SkuResource objects. If null, there are no additional pages. @@ -28526,56 +28371,50 @@ def __init__( :keyword value: An array of objects of type SkuResource. :paramtype value: list[~azure.mgmt.machinelearningservices.models.SkuResource] """ - super(SkuResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class SkuSetting(msrest.serialization.Model): +class SkuSetting(_serialization.Model): """SkuSetting fulfills the need for stripped down SKU info in ARM contract. All required parameters must be populated in order to send to Azure. - :ivar name: Required. [Required] The name of the SKU. Ex - P3. It is typically a letter+number - code. + :ivar name: [Required] The name of the SKU. Ex - P3. It is typically a letter+number code. + Required. :vartype name: str :ivar tier: This field is required to be implemented by the Resource Provider if the service - has more than one tier, but is not required on a PUT. Possible values include: "Free", "Basic", - "Standard", "Premium". + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", and "Premium". :vartype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier """ _validation = { - 'name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'tier': {'key': 'tier', 'type': 'str'}, + "name": {"key": "name", "type": "str"}, + "tier": {"key": "tier", "type": "str"}, } - def __init__( - self, - *, - name: str, - tier: Optional[Union[str, "SkuTier"]] = None, - **kwargs - ): + def __init__(self, *, name: str, tier: Optional[Union[str, "_models.SkuTier"]] = None, **kwargs: Any) -> None: """ - :keyword name: Required. [Required] The name of the SKU. Ex - P3. It is typically a - letter+number code. + :keyword name: [Required] The name of the SKU. Ex - P3. It is typically a letter+number code. + Required. :paramtype name: str :keyword tier: This field is required to be implemented by the Resource Provider if the service - has more than one tier, but is not required on a PUT. Possible values include: "Free", "Basic", - "Standard", "Premium". + has more than one tier, but is not required on a PUT. Known values are: "Free", "Basic", + "Standard", and "Premium". 
:paramtype tier: str or ~azure.mgmt.machinelearningservices.models.SkuTier """ - super(SkuSetting, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = name self.tier = tier -class SparkJob(JobBaseProperties): +class SparkJob(JobBaseProperties): # pylint: disable=too-many-instance-attributes """Spark job definition. Variables are only populated by the server, and will be ignored when sending a request. @@ -28586,7 +28425,7 @@ class SparkJob(JobBaseProperties): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar component_id: ARM resource ID of the component resource. :vartype component_id: str @@ -28603,8 +28442,8 @@ class SparkJob(JobBaseProperties): :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration :ivar is_archived: Is the asset archived?. :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". + :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType :ivar notification_setting: Notification setting for the job. :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting @@ -28614,19 +28453,19 @@ class SparkJob(JobBaseProperties): :ivar services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus :ivar archives: Archive files used in the job. :vartype archives: list[str] :ivar args: Arguments for the job. :vartype args: str - :ivar code_id: Required. [Required] ARM resource ID of the code asset. + :ivar code_id: [Required] ARM resource ID of the code asset. Required. :vartype code_id: str :ivar conf: Spark configured properties. :vartype conf: dict[str, str] - :ivar entry: Required. [Required] The entry to execute on startup of the job. + :ivar entry: [Required] The entry to execute on startup of the job. Required. :vartype entry: ~azure.mgmt.machinelearningservices.models.SparkJobEntry :ivar environment_id: The ARM resource ID of the Environment specification for the job. 
:vartype environment_id: str @@ -28647,78 +28486,78 @@ class SparkJob(JobBaseProperties): """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'code_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'entry': {'required': True}, + "job_type": {"required": True}, + "status": {"readonly": True}, + "code_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "entry": {"required": True}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'archives': {'key': 'archives', 'type': '[str]'}, - 'args': {'key': 'args', 'type': 'str'}, - 'code_id': {'key': 'codeId', 'type': 'str'}, - 'conf': {'key': 'conf', 'type': '{str}'}, - 'entry': {'key': 'entry', 'type': 'SparkJobEntry'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'files': {'key': 'files', 'type': '[str]'}, - 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'jars': {'key': 'jars', 'type': '[str]'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'py_files': {'key': 'pyFiles', 'type': '[str]'}, - 'queue_settings': {'key': 'queueSettings', 'type': 'QueueSettings'}, - 'resources': {'key': 'resources', 'type': 'SparkResourceConfiguration'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "archives": {"key": "archives", "type": "[str]"}, + "args": {"key": "args", "type": "str"}, + "code_id": {"key": "codeId", "type": "str"}, + "conf": {"key": "conf", "type": "{str}"}, + "entry": {"key": "entry", "type": "SparkJobEntry"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "files": {"key": "files", "type": "[str]"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + "jars": {"key": "jars", "type": "[str]"}, + "outputs": {"key": "outputs", "type": "{JobOutput}"}, + "py_files": {"key": "pyFiles", "type": "[str]"}, + "queue_settings": {"key": "queueSettings", "type": "QueueSettings"}, + "resources": {"key": "resources", "type": 
"SparkResourceConfiguration"}, } - def __init__( + def __init__( # pylint: disable=too-many-locals self, *, code_id: str, - entry: "SparkJobEntry", + entry: "_models.SparkJobEntry", description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, component_id: Optional[str] = None, compute_id: Optional[str] = None, display_name: Optional[str] = None, - experiment_name: Optional[str] = "Default", - identity: Optional["IdentityConfiguration"] = None, - is_archived: Optional[bool] = False, - notification_setting: Optional["NotificationSetting"] = None, - secrets_configuration: Optional[Dict[str, "SecretConfiguration"]] = None, - services: Optional[Dict[str, "JobService"]] = None, + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + notification_setting: Optional["_models.NotificationSetting"] = None, + secrets_configuration: Optional[Dict[str, "_models.SecretConfiguration"]] = None, + services: Optional[Dict[str, "_models.JobService"]] = None, archives: Optional[List[str]] = None, args: Optional[str] = None, conf: Optional[Dict[str, str]] = None, environment_id: Optional[str] = None, files: Optional[List[str]] = None, - inputs: Optional[Dict[str, "JobInput"]] = None, + inputs: Optional[Dict[str, "_models.JobInput"]] = None, jars: Optional[List[str]] = None, - outputs: Optional[Dict[str, "JobOutput"]] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, py_files: Optional[List[str]] = None, - queue_settings: Optional["QueueSettings"] = None, - resources: Optional["SparkResourceConfiguration"] = None, - **kwargs - ): + queue_settings: Optional["_models.QueueSettings"] = None, + resources: Optional["_models.SparkResourceConfiguration"] = None, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword component_id: ARM resource ID of the component resource. :paramtype component_id: str @@ -28747,11 +28586,11 @@ def __init__( :paramtype archives: list[str] :keyword args: Arguments for the job. :paramtype args: str - :keyword code_id: Required. [Required] ARM resource ID of the code asset. + :keyword code_id: [Required] ARM resource ID of the code asset. Required. :paramtype code_id: str :keyword conf: Spark configured properties. :paramtype conf: dict[str, str] - :keyword entry: Required. [Required] The entry to execute on startup of the job. + :keyword entry: [Required] The entry to execute on startup of the job. Required. :paramtype entry: ~azure.mgmt.machinelearningservices.models.SparkJobEntry :keyword environment_id: The ARM resource ID of the Environment specification for the job. :paramtype environment_id: str @@ -28770,8 +28609,22 @@ def __init__( :keyword resources: Compute Resource configuration for the job. 
:paramtype resources: ~azure.mgmt.machinelearningservices.models.SparkResourceConfiguration """ - super(SparkJob, self).__init__(description=description, properties=properties, tags=tags, component_id=component_id, compute_id=compute_id, display_name=display_name, experiment_name=experiment_name, identity=identity, is_archived=is_archived, notification_setting=notification_setting, secrets_configuration=secrets_configuration, services=services, **kwargs) - self.job_type = 'Spark' # type: str + super().__init__( + description=description, + properties=properties, + tags=tags, + component_id=component_id, + compute_id=compute_id, + display_name=display_name, + experiment_name=experiment_name, + identity=identity, + is_archived=is_archived, + notification_setting=notification_setting, + secrets_configuration=secrets_configuration, + services=services, + **kwargs + ) + self.job_type: str = "Spark" self.archives = archives self.args = args self.code_id = code_id @@ -28787,40 +28640,39 @@ def __init__( self.resources = resources -class SparkJobEntry(msrest.serialization.Model): +class SparkJobEntry(_serialization.Model): """Spark job entry point definition. - You probably want to use the sub-classes and not this class directly. Known - sub-classes are: SparkJobPythonEntry, SparkJobScalaEntry. + You probably want to use the sub-classes and not this class directly. Known sub-classes are: + SparkJobPythonEntry, SparkJobScalaEntry All required parameters must be populated in order to send to Azure. - :ivar spark_job_entry_type: Required. [Required] Type of the job's entry point.Constant filled - by server. Possible values include: "SparkJobPythonEntry", "SparkJobScalaEntry". + :ivar spark_job_entry_type: [Required] Type of the job's entry point. Required. Known values + are: "SparkJobPythonEntry" and "SparkJobScalaEntry". :vartype spark_job_entry_type: str or ~azure.mgmt.machinelearningservices.models.SparkJobEntryType """ _validation = { - 'spark_job_entry_type': {'required': True}, + "spark_job_entry_type": {"required": True}, } _attribute_map = { - 'spark_job_entry_type': {'key': 'sparkJobEntryType', 'type': 'str'}, + "spark_job_entry_type": {"key": "sparkJobEntryType", "type": "str"}, } _subtype_map = { - 'spark_job_entry_type': {'SparkJobPythonEntry': 'SparkJobPythonEntry', 'SparkJobScalaEntry': 'SparkJobScalaEntry'} + "spark_job_entry_type": { + "SparkJobPythonEntry": "SparkJobPythonEntry", + "SparkJobScalaEntry": "SparkJobScalaEntry", + } } - def __init__( - self, - **kwargs - ): - """ - """ - super(SparkJobEntry, self).__init__(**kwargs) - self.spark_job_entry_type = None # type: Optional[str] + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.spark_job_entry_type: Optional[str] = None class SparkJobPythonEntry(SparkJobEntry): @@ -28828,36 +28680,31 @@ class SparkJobPythonEntry(SparkJobEntry): All required parameters must be populated in order to send to Azure. - :ivar spark_job_entry_type: Required. [Required] Type of the job's entry point.Constant filled - by server. Possible values include: "SparkJobPythonEntry", "SparkJobScalaEntry". + :ivar spark_job_entry_type: [Required] Type of the job's entry point. Required. Known values + are: "SparkJobPythonEntry" and "SparkJobScalaEntry". :vartype spark_job_entry_type: str or ~azure.mgmt.machinelearningservices.models.SparkJobEntryType - :ivar file: Required. [Required] Relative python file path for job entry point. + :ivar file: [Required] Relative python file path for job entry point. Required. 
:vartype file: str """ _validation = { - 'spark_job_entry_type': {'required': True}, - 'file': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "spark_job_entry_type": {"required": True}, + "file": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'spark_job_entry_type': {'key': 'sparkJobEntryType', 'type': 'str'}, - 'file': {'key': 'file', 'type': 'str'}, + "spark_job_entry_type": {"key": "sparkJobEntryType", "type": "str"}, + "file": {"key": "file", "type": "str"}, } - def __init__( - self, - *, - file: str, - **kwargs - ): + def __init__(self, *, file: str, **kwargs: Any) -> None: """ - :keyword file: Required. [Required] Relative python file path for job entry point. + :keyword file: [Required] Relative python file path for job entry point. Required. :paramtype file: str """ - super(SparkJobPythonEntry, self).__init__(**kwargs) - self.spark_job_entry_type = 'SparkJobPythonEntry' # type: str + super().__init__(**kwargs) + self.spark_job_entry_type: str = "SparkJobPythonEntry" self.file = file @@ -28866,40 +28713,35 @@ class SparkJobScalaEntry(SparkJobEntry): All required parameters must be populated in order to send to Azure. - :ivar spark_job_entry_type: Required. [Required] Type of the job's entry point.Constant filled - by server. Possible values include: "SparkJobPythonEntry", "SparkJobScalaEntry". + :ivar spark_job_entry_type: [Required] Type of the job's entry point. Required. Known values + are: "SparkJobPythonEntry" and "SparkJobScalaEntry". :vartype spark_job_entry_type: str or ~azure.mgmt.machinelearningservices.models.SparkJobEntryType - :ivar class_name: Required. [Required] Scala class name used as entry point. + :ivar class_name: [Required] Scala class name used as entry point. Required. :vartype class_name: str """ _validation = { - 'spark_job_entry_type': {'required': True}, - 'class_name': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "spark_job_entry_type": {"required": True}, + "class_name": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'spark_job_entry_type': {'key': 'sparkJobEntryType', 'type': 'str'}, - 'class_name': {'key': 'className', 'type': 'str'}, + "spark_job_entry_type": {"key": "sparkJobEntryType", "type": "str"}, + "class_name": {"key": "className", "type": "str"}, } - def __init__( - self, - *, - class_name: str, - **kwargs - ): + def __init__(self, *, class_name: str, **kwargs: Any) -> None: """ - :keyword class_name: Required. [Required] Scala class name used as entry point. + :keyword class_name: [Required] Scala class name used as entry point. Required. :paramtype class_name: str """ - super(SparkJobScalaEntry, self).__init__(**kwargs) - self.spark_job_entry_type = 'SparkJobScalaEntry' # type: str + super().__init__(**kwargs) + self.spark_job_entry_type: str = "SparkJobScalaEntry" self.class_name = class_name -class SparkResourceConfiguration(msrest.serialization.Model): +class SparkResourceConfiguration(_serialization.Model): """SparkResourceConfiguration. :ivar instance_type: Optional type of VM used as supported by the compute target. 
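A brief usage sketch for the two entry-point hunks above. The import namespace is assumed from the ~azure.mgmt.machinelearningservices.models cross-references in these docstrings, and the file path and class name are hypothetical placeholders:

    # Sketch only: keyword-only constructors of the regenerated entry-point models.
    from azure.mgmt.machinelearningservices import models as _models

    python_entry = _models.SparkJobPythonEntry(file="src/entry.py")          # hypothetical path
    scala_entry = _models.SparkJobScalaEntry(class_name="com.contoso.App")   # hypothetical class

    # The discriminator is now set by the subclass itself instead of via a trailing type comment.
    assert python_entry.spark_job_entry_type == "SparkJobPythonEntry"
    assert scala_entry.spark_job_entry_type == "SparkJobScalaEntry"
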
@@ -28909,33 +28751,27 @@ class SparkResourceConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'instance_type': {'key': 'instanceType', 'type': 'str'}, - 'runtime_version': {'key': 'runtimeVersion', 'type': 'str'}, + "instance_type": {"key": "instanceType", "type": "str"}, + "runtime_version": {"key": "runtimeVersion", "type": "str"}, } - def __init__( - self, - *, - instance_type: Optional[str] = None, - runtime_version: Optional[str] = "3.1", - **kwargs - ): + def __init__(self, *, instance_type: Optional[str] = None, runtime_version: str = "3.1", **kwargs: Any) -> None: """ :keyword instance_type: Optional type of VM used as supported by the compute target. :paramtype instance_type: str :keyword runtime_version: Version of spark runtime used for the job. :paramtype runtime_version: str """ - super(SparkResourceConfiguration, self).__init__(**kwargs) + super().__init__(**kwargs) self.instance_type = instance_type self.runtime_version = runtime_version -class SslConfiguration(msrest.serialization.Model): +class SslConfiguration(_serialization.Model): """The ssl configuration for scoring. - :ivar status: Enable or disable ssl for scoring. Possible values include: "Disabled", - "Enabled", "Auto". + :ivar status: Enable or disable ssl for scoring. Known values are: "Disabled", "Enabled", and + "Auto". :vartype status: str or ~azure.mgmt.machinelearningservices.models.SslConfigStatus :ivar cert: Cert data. :vartype cert: str @@ -28950,28 +28786,28 @@ class SslConfiguration(msrest.serialization.Model): """ _attribute_map = { - 'status': {'key': 'status', 'type': 'str'}, - 'cert': {'key': 'cert', 'type': 'str'}, - 'key': {'key': 'key', 'type': 'str'}, - 'cname': {'key': 'cname', 'type': 'str'}, - 'leaf_domain_label': {'key': 'leafDomainLabel', 'type': 'str'}, - 'overwrite_existing_domain': {'key': 'overwriteExistingDomain', 'type': 'bool'}, + "status": {"key": "status", "type": "str"}, + "cert": {"key": "cert", "type": "str"}, + "key": {"key": "key", "type": "str"}, + "cname": {"key": "cname", "type": "str"}, + "leaf_domain_label": {"key": "leafDomainLabel", "type": "str"}, + "overwrite_existing_domain": {"key": "overwriteExistingDomain", "type": "bool"}, } def __init__( self, *, - status: Optional[Union[str, "SslConfigStatus"]] = None, + status: Optional[Union[str, "_models.SslConfigStatus"]] = None, cert: Optional[str] = None, key: Optional[str] = None, cname: Optional[str] = None, leaf_domain_label: Optional[str] = None, overwrite_existing_domain: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword status: Enable or disable ssl for scoring. Possible values include: "Disabled", - "Enabled", "Auto". + :keyword status: Enable or disable ssl for scoring. Known values are: "Disabled", "Enabled", + and "Auto". :paramtype status: str or ~azure.mgmt.machinelearningservices.models.SslConfigStatus :keyword cert: Cert data. :paramtype cert: str @@ -28984,7 +28820,7 @@ def __init__( :keyword overwrite_existing_domain: Indicates whether to overwrite existing domain label. :paramtype overwrite_existing_domain: bool """ - super(SslConfiguration, self).__init__(**kwargs) + super().__init__(**kwargs) self.status = status self.cert = cert self.key = key @@ -28993,54 +28829,54 @@ def __init__( self.overwrite_existing_domain = overwrite_existing_domain -class StackEnsembleSettings(msrest.serialization.Model): +class StackEnsembleSettings(_serialization.Model): """Advances setting to customize StackEnsemble run. 
:ivar stack_meta_learner_k_wargs: Optional parameters to pass to the initializer of the meta-learner. - :vartype stack_meta_learner_k_wargs: any + :vartype stack_meta_learner_k_wargs: JSON :ivar stack_meta_learner_train_percentage: Specifies the proportion of the training set (when choosing train and validation type of training) to be reserved for training the meta-learner. Default value is 0.2. :vartype stack_meta_learner_train_percentage: float :ivar stack_meta_learner_type: The meta-learner is a model trained on the output of the - individual heterogeneous models. Possible values include: "None", "LogisticRegression", + individual heterogeneous models. Known values are: "None", "LogisticRegression", "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV", - "LightGBMRegressor", "LinearRegression". + "LightGBMRegressor", and "LinearRegression". :vartype stack_meta_learner_type: str or ~azure.mgmt.machinelearningservices.models.StackMetaLearnerType """ _attribute_map = { - 'stack_meta_learner_k_wargs': {'key': 'stackMetaLearnerKWargs', 'type': 'object'}, - 'stack_meta_learner_train_percentage': {'key': 'stackMetaLearnerTrainPercentage', 'type': 'float'}, - 'stack_meta_learner_type': {'key': 'stackMetaLearnerType', 'type': 'str'}, + "stack_meta_learner_k_wargs": {"key": "stackMetaLearnerKWargs", "type": "object"}, + "stack_meta_learner_train_percentage": {"key": "stackMetaLearnerTrainPercentage", "type": "float"}, + "stack_meta_learner_type": {"key": "stackMetaLearnerType", "type": "str"}, } def __init__( self, *, - stack_meta_learner_k_wargs: Optional[Any] = None, - stack_meta_learner_train_percentage: Optional[float] = 0.2, - stack_meta_learner_type: Optional[Union[str, "StackMetaLearnerType"]] = None, - **kwargs - ): + stack_meta_learner_k_wargs: Optional[JSON] = None, + stack_meta_learner_train_percentage: float = 0.2, + stack_meta_learner_type: Optional[Union[str, "_models.StackMetaLearnerType"]] = None, + **kwargs: Any + ) -> None: """ :keyword stack_meta_learner_k_wargs: Optional parameters to pass to the initializer of the meta-learner. - :paramtype stack_meta_learner_k_wargs: any + :paramtype stack_meta_learner_k_wargs: JSON :keyword stack_meta_learner_train_percentage: Specifies the proportion of the training set (when choosing train and validation type of training) to be reserved for training the meta-learner. Default value is 0.2. :paramtype stack_meta_learner_train_percentage: float :keyword stack_meta_learner_type: The meta-learner is a model trained on the output of the - individual heterogeneous models. Possible values include: "None", "LogisticRegression", + individual heterogeneous models. Known values are: "None", "LogisticRegression", "LogisticRegressionCV", "LightGBMClassifier", "ElasticNet", "ElasticNetCV", - "LightGBMRegressor", "LinearRegression". + "LightGBMRegressor", and "LinearRegression". :paramtype stack_meta_learner_type: str or ~azure.mgmt.machinelearningservices.models.StackMetaLearnerType """ - super(StackEnsembleSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.stack_meta_learner_k_wargs = stack_meta_learner_k_wargs self.stack_meta_learner_train_percentage = stack_meta_learner_train_percentage self.stack_meta_learner_type = stack_meta_learner_type @@ -29055,82 +28891,83 @@ class StaticInputData(MonitoringInputDataBase): :vartype columns: dict[str, str] :ivar data_context: The context metadata of the data source. :vartype data_context: str - :ivar input_data_type: Required. 
[Required] Specifies the type of signal to monitor.Constant - filled by server. Possible values include: "Static", "Trailing", "Fixed". + :ivar input_data_type: [Required] Specifies the type of signal to monitor. Required. Known + values are: "Static", "Trailing", and "Fixed". :vartype input_data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringInputDataType - :ivar job_input_type: Required. [Required] Specifies the type of job. Possible values include: - "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", "triton_model". + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :ivar uri: Required. [Required] Input Asset URI. + :ivar uri: [Required] Input Asset URI. Required. :vartype uri: str :ivar preprocessing_component_id: The ARM resource ID of the component resource used to preprocess the data. :vartype preprocessing_component_id: str - :ivar window_end: Required. [Required] The end date of the data window. + :ivar window_end: [Required] The end date of the data window. Required. :vartype window_end: ~datetime.datetime - :ivar window_start: Required. [Required] The start date of the data window. + :ivar window_start: [Required] The start date of the data window. Required. :vartype window_start: ~datetime.datetime """ _validation = { - 'input_data_type': {'required': True}, - 'job_input_type': {'required': True}, - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'window_end': {'required': True}, - 'window_start': {'required': True}, + "input_data_type": {"required": True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "window_end": {"required": True}, + "window_start": {"required": True}, } _attribute_map = { - 'columns': {'key': 'columns', 'type': '{str}'}, - 'data_context': {'key': 'dataContext', 'type': 'str'}, - 'input_data_type': {'key': 'inputDataType', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'preprocessing_component_id': {'key': 'preprocessingComponentId', 'type': 'str'}, - 'window_end': {'key': 'windowEnd', 'type': 'iso-8601'}, - 'window_start': {'key': 'windowStart', 'type': 'iso-8601'}, + "columns": {"key": "columns", "type": "{str}"}, + "data_context": {"key": "dataContext", "type": "str"}, + "input_data_type": {"key": "inputDataType", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "preprocessing_component_id": {"key": "preprocessingComponentId", "type": "str"}, + "window_end": {"key": "windowEnd", "type": "iso-8601"}, + "window_start": {"key": "windowStart", "type": "iso-8601"}, } def __init__( self, *, - job_input_type: Union[str, "JobInputType"], + job_input_type: Union[str, "_models.JobInputType"], uri: str, window_end: datetime.datetime, window_start: datetime.datetime, columns: Optional[Dict[str, str]] = None, data_context: Optional[str] = None, preprocessing_component_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword columns: Mapping of column names to special uses. :paramtype columns: dict[str, str] :keyword data_context: The context metadata of the data source. 
:paramtype data_context: str - :keyword job_input_type: Required. [Required] Specifies the type of job. Possible values - include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", + :keyword job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". :paramtype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :keyword uri: Required. [Required] Input Asset URI. + :keyword uri: [Required] Input Asset URI. Required. :paramtype uri: str :keyword preprocessing_component_id: The ARM resource ID of the component resource used to preprocess the data. :paramtype preprocessing_component_id: str - :keyword window_end: Required. [Required] The end date of the data window. + :keyword window_end: [Required] The end date of the data window. Required. :paramtype window_end: ~datetime.datetime - :keyword window_start: Required. [Required] The start date of the data window. + :keyword window_start: [Required] The start date of the data window. Required. :paramtype window_start: ~datetime.datetime """ - super(StaticInputData, self).__init__(columns=columns, data_context=data_context, job_input_type=job_input_type, uri=uri, **kwargs) - self.input_data_type = 'Static' # type: str + super().__init__(columns=columns, data_context=data_context, job_input_type=job_input_type, uri=uri, **kwargs) + self.input_data_type: str = "Static" self.preprocessing_component_id = preprocessing_component_id self.window_end = window_end self.window_start = window_start -class StatusMessage(msrest.serialization.Model): +class StatusMessage(_serialization.Model): """Active message associated with project. Variables are only populated by the server, and will be ignored when sending a request. @@ -29139,7 +28976,7 @@ class StatusMessage(msrest.serialization.Model): :vartype code: str :ivar created_date_time: Time in UTC at which the message was created. :vartype created_date_time: ~datetime.datetime - :ivar level: Severity level of message. Possible values include: "Error", "Information", + :ivar level: Severity level of message. Known values are: "Error", "Information", and "Warning". :vartype level: str or ~azure.mgmt.machinelearningservices.models.StatusMessageLevel :ivar message: A human-readable representation of the message code. 
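A minimal sketch of the regenerated StaticInputData constructor from the hunk above; the namespace is assumed from the docstring cross-references, and the URI and data context values are illustrative placeholders:

    # Sketch only: the required window bounds are datetimes, serialized as iso-8601.
    import datetime
    from azure.mgmt.machinelearningservices import models as _models

    static_input = _models.StaticInputData(
        job_input_type="mltable",  # one of the JobInputType known values
        uri="azureml://datastores/workspaceblobstore/paths/data/",  # placeholder asset URI
        window_start=datetime.datetime(2023, 8, 1, tzinfo=datetime.timezone.utc),
        window_end=datetime.datetime(2023, 8, 31, tzinfo=datetime.timezone.utc),
        data_context="model_inputs",  # optional context metadata
    )
    assert static_input.input_data_type == "Static"  # discriminator filled by the model
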
@@ -29147,33 +28984,29 @@ class StatusMessage(msrest.serialization.Model): """ _validation = { - 'code': {'readonly': True}, - 'created_date_time': {'readonly': True}, - 'level': {'readonly': True}, - 'message': {'readonly': True}, + "code": {"readonly": True}, + "created_date_time": {"readonly": True}, + "level": {"readonly": True}, + "message": {"readonly": True}, } _attribute_map = { - 'code': {'key': 'code', 'type': 'str'}, - 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'}, - 'level': {'key': 'level', 'type': 'str'}, - 'message': {'key': 'message', 'type': 'str'}, + "code": {"key": "code", "type": "str"}, + "created_date_time": {"key": "createdDateTime", "type": "iso-8601"}, + "level": {"key": "level", "type": "str"}, + "message": {"key": "message", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(StatusMessage, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.code = None self.created_date_time = None self.level = None self.message = None -class StorageAccountDetails(msrest.serialization.Model): +class StorageAccountDetails(_serialization.Model): """Details of storage account to be used for the Registry. :ivar system_created_storage_account: Details of system created storage account to be used for @@ -29187,17 +29020,17 @@ class StorageAccountDetails(msrest.serialization.Model): """ _attribute_map = { - 'system_created_storage_account': {'key': 'systemCreatedStorageAccount', 'type': 'SystemCreatedStorageAccount'}, - 'user_created_storage_account': {'key': 'userCreatedStorageAccount', 'type': 'UserCreatedStorageAccount'}, + "system_created_storage_account": {"key": "systemCreatedStorageAccount", "type": "SystemCreatedStorageAccount"}, + "user_created_storage_account": {"key": "userCreatedStorageAccount", "type": "UserCreatedStorageAccount"}, } def __init__( self, *, - system_created_storage_account: Optional["SystemCreatedStorageAccount"] = None, - user_created_storage_account: Optional["UserCreatedStorageAccount"] = None, - **kwargs - ): + system_created_storage_account: Optional["_models.SystemCreatedStorageAccount"] = None, + user_created_storage_account: Optional["_models.UserCreatedStorageAccount"] = None, + **kwargs: Any + ) -> None: """ :keyword system_created_storage_account: Details of system created storage account to be used for the registry. @@ -29208,12 +29041,12 @@ def __init__( :paramtype user_created_storage_account: ~azure.mgmt.machinelearningservices.models.UserCreatedStorageAccount """ - super(StorageAccountDetails, self).__init__(**kwargs) + super().__init__(**kwargs) self.system_created_storage_account = system_created_storage_account self.user_created_storage_account = user_created_storage_account -class SweepJob(JobBaseProperties): +class SweepJob(JobBaseProperties): # pylint: disable=too-many-instance-attributes """Sweep job definition. Variables are only populated by the server, and will be ignored when sending a request. @@ -29224,7 +29057,7 @@ class SweepJob(JobBaseProperties): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar component_id: ARM resource ID of the component resource. 
:vartype component_id: str @@ -29241,8 +29074,8 @@ class SweepJob(JobBaseProperties): :vartype identity: ~azure.mgmt.machinelearningservices.models.IdentityConfiguration :ivar is_archived: Is the asset archived?. :vartype is_archived: bool - :ivar job_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "AutoML", "Command", "Labeling", "Sweep", "Pipeline", "Spark". + :ivar job_type: [Required] Specifies the type of job. Required. Known values are: "AutoML", + "Command", "Labeling", "Sweep", "Pipeline", and "Spark". :vartype job_type: str or ~azure.mgmt.machinelearningservices.models.JobType :ivar notification_setting: Notification setting for the job. :vartype notification_setting: ~azure.mgmt.machinelearningservices.models.NotificationSetting @@ -29252,9 +29085,9 @@ class SweepJob(JobBaseProperties): :ivar services: List of JobEndpoints. For local jobs, a job endpoint will have an endpoint value of FileStreamObject. :vartype services: dict[str, ~azure.mgmt.machinelearningservices.models.JobService] - :ivar status: Status of the job. Possible values include: "NotStarted", "Starting", - "Provisioning", "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", - "Failed", "Canceled", "NotResponding", "Paused", "Unknown", "Scheduled". + :ivar status: Status of the job. Known values are: "NotStarted", "Starting", "Provisioning", + "Preparing", "Queued", "Running", "Finalizing", "CancelRequested", "Completed", "Failed", + "Canceled", "NotResponding", "Paused", "Unknown", and "Scheduled". :vartype status: str or ~azure.mgmt.machinelearningservices.models.JobStatus :ivar component_configuration: Component Configuration for sweep over component. :vartype component_configuration: @@ -29266,7 +29099,7 @@ class SweepJob(JobBaseProperties): :vartype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] :ivar limits: Sweep Job limit. :vartype limits: ~azure.mgmt.machinelearningservices.models.SweepJobLimits - :ivar objective: Required. [Required] Optimization objective. + :ivar objective: [Required] Optimization objective. Required. :vartype objective: ~azure.mgmt.machinelearningservices.models.Objective :ivar outputs: Mapping of output data bindings used in the job. :vartype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] @@ -29274,86 +29107,86 @@ class SweepJob(JobBaseProperties): :vartype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings :ivar resources: Compute Resource configuration for the job. :vartype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration - :ivar sampling_algorithm: Required. [Required] The hyperparameter sampling algorithm. + :ivar sampling_algorithm: [Required] The hyperparameter sampling algorithm. Required. :vartype sampling_algorithm: ~azure.mgmt.machinelearningservices.models.SamplingAlgorithm - :ivar search_space: Required. [Required] A dictionary containing each parameter and its - distribution. The dictionary key is the name of the parameter. - :vartype search_space: any - :ivar trial: Required. [Required] Trial component definition. + :ivar search_space: [Required] A dictionary containing each parameter and its distribution. The + dictionary key is the name of the parameter. Required. + :vartype search_space: JSON + :ivar trial: [Required] Trial component definition. Required. 
:vartype trial: ~azure.mgmt.machinelearningservices.models.TrialComponent """ _validation = { - 'job_type': {'required': True}, - 'status': {'readonly': True}, - 'objective': {'required': True}, - 'sampling_algorithm': {'required': True}, - 'search_space': {'required': True}, - 'trial': {'required': True}, - } - - _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'component_id': {'key': 'componentId', 'type': 'str'}, - 'compute_id': {'key': 'computeId', 'type': 'str'}, - 'display_name': {'key': 'displayName', 'type': 'str'}, - 'experiment_name': {'key': 'experimentName', 'type': 'str'}, - 'identity': {'key': 'identity', 'type': 'IdentityConfiguration'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'job_type': {'key': 'jobType', 'type': 'str'}, - 'notification_setting': {'key': 'notificationSetting', 'type': 'NotificationSetting'}, - 'secrets_configuration': {'key': 'secretsConfiguration', 'type': '{SecretConfiguration}'}, - 'services': {'key': 'services', 'type': '{JobService}'}, - 'status': {'key': 'status', 'type': 'str'}, - 'component_configuration': {'key': 'componentConfiguration', 'type': 'ComponentConfiguration'}, - 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'}, - 'inputs': {'key': 'inputs', 'type': '{JobInput}'}, - 'limits': {'key': 'limits', 'type': 'SweepJobLimits'}, - 'objective': {'key': 'objective', 'type': 'Objective'}, - 'outputs': {'key': 'outputs', 'type': '{JobOutput}'}, - 'queue_settings': {'key': 'queueSettings', 'type': 'QueueSettings'}, - 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, - 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'SamplingAlgorithm'}, - 'search_space': {'key': 'searchSpace', 'type': 'object'}, - 'trial': {'key': 'trial', 'type': 'TrialComponent'}, - } - - def __init__( - self, - *, - objective: "Objective", - sampling_algorithm: "SamplingAlgorithm", - search_space: Any, - trial: "TrialComponent", + "job_type": {"required": True}, + "status": {"readonly": True}, + "objective": {"required": True}, + "sampling_algorithm": {"required": True}, + "search_space": {"required": True}, + "trial": {"required": True}, + } + + _attribute_map = { + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "component_id": {"key": "componentId", "type": "str"}, + "compute_id": {"key": "computeId", "type": "str"}, + "display_name": {"key": "displayName", "type": "str"}, + "experiment_name": {"key": "experimentName", "type": "str"}, + "identity": {"key": "identity", "type": "IdentityConfiguration"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "job_type": {"key": "jobType", "type": "str"}, + "notification_setting": {"key": "notificationSetting", "type": "NotificationSetting"}, + "secrets_configuration": {"key": "secretsConfiguration", "type": "{SecretConfiguration}"}, + "services": {"key": "services", "type": "{JobService}"}, + "status": {"key": "status", "type": "str"}, + "component_configuration": {"key": "componentConfiguration", "type": "ComponentConfiguration"}, + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "inputs": {"key": "inputs", "type": "{JobInput}"}, + "limits": {"key": "limits", "type": "SweepJobLimits"}, + "objective": {"key": "objective", "type": "Objective"}, + "outputs": {"key": "outputs", 
"type": "{JobOutput}"}, + "queue_settings": {"key": "queueSettings", "type": "QueueSettings"}, + "resources": {"key": "resources", "type": "JobResourceConfiguration"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "SamplingAlgorithm"}, + "search_space": {"key": "searchSpace", "type": "object"}, + "trial": {"key": "trial", "type": "TrialComponent"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + objective: "_models.Objective", + sampling_algorithm: "_models.SamplingAlgorithm", + search_space: JSON, + trial: "_models.TrialComponent", description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, component_id: Optional[str] = None, compute_id: Optional[str] = None, display_name: Optional[str] = None, - experiment_name: Optional[str] = "Default", - identity: Optional["IdentityConfiguration"] = None, - is_archived: Optional[bool] = False, - notification_setting: Optional["NotificationSetting"] = None, - secrets_configuration: Optional[Dict[str, "SecretConfiguration"]] = None, - services: Optional[Dict[str, "JobService"]] = None, - component_configuration: Optional["ComponentConfiguration"] = None, - early_termination: Optional["EarlyTerminationPolicy"] = None, - inputs: Optional[Dict[str, "JobInput"]] = None, - limits: Optional["SweepJobLimits"] = None, - outputs: Optional[Dict[str, "JobOutput"]] = None, - queue_settings: Optional["QueueSettings"] = None, - resources: Optional["JobResourceConfiguration"] = None, - **kwargs - ): + experiment_name: str = "Default", + identity: Optional["_models.IdentityConfiguration"] = None, + is_archived: bool = False, + notification_setting: Optional["_models.NotificationSetting"] = None, + secrets_configuration: Optional[Dict[str, "_models.SecretConfiguration"]] = None, + services: Optional[Dict[str, "_models.JobService"]] = None, + component_configuration: Optional["_models.ComponentConfiguration"] = None, + early_termination: Optional["_models.EarlyTerminationPolicy"] = None, + inputs: Optional[Dict[str, "_models.JobInput"]] = None, + limits: Optional["_models.SweepJobLimits"] = None, + outputs: Optional[Dict[str, "_models.JobOutput"]] = None, + queue_settings: Optional["_models.QueueSettings"] = None, + resources: Optional["_models.JobResourceConfiguration"] = None, + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword component_id: ARM resource ID of the component resource. :paramtype component_id: str @@ -29388,7 +29221,7 @@ def __init__( :paramtype inputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobInput] :keyword limits: Sweep Job limit. :paramtype limits: ~azure.mgmt.machinelearningservices.models.SweepJobLimits - :keyword objective: Required. [Required] Optimization objective. + :keyword objective: [Required] Optimization objective. Required. :paramtype objective: ~azure.mgmt.machinelearningservices.models.Objective :keyword outputs: Mapping of output data bindings used in the job. 
:paramtype outputs: dict[str, ~azure.mgmt.machinelearningservices.models.JobOutput] @@ -29396,16 +29229,30 @@ def __init__( :paramtype queue_settings: ~azure.mgmt.machinelearningservices.models.QueueSettings :keyword resources: Compute Resource configuration for the job. :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration - :keyword sampling_algorithm: Required. [Required] The hyperparameter sampling algorithm. + :keyword sampling_algorithm: [Required] The hyperparameter sampling algorithm. Required. :paramtype sampling_algorithm: ~azure.mgmt.machinelearningservices.models.SamplingAlgorithm - :keyword search_space: Required. [Required] A dictionary containing each parameter and its - distribution. The dictionary key is the name of the parameter. - :paramtype search_space: any - :keyword trial: Required. [Required] Trial component definition. + :keyword search_space: [Required] A dictionary containing each parameter and its distribution. + The dictionary key is the name of the parameter. Required. + :paramtype search_space: JSON + :keyword trial: [Required] Trial component definition. Required. :paramtype trial: ~azure.mgmt.machinelearningservices.models.TrialComponent """ - super(SweepJob, self).__init__(description=description, properties=properties, tags=tags, component_id=component_id, compute_id=compute_id, display_name=display_name, experiment_name=experiment_name, identity=identity, is_archived=is_archived, notification_setting=notification_setting, secrets_configuration=secrets_configuration, services=services, **kwargs) - self.job_type = 'Sweep' # type: str + super().__init__( + description=description, + properties=properties, + tags=tags, + component_id=component_id, + compute_id=compute_id, + display_name=display_name, + experiment_name=experiment_name, + identity=identity, + is_archived=is_archived, + notification_setting=notification_setting, + secrets_configuration=secrets_configuration, + services=services, + **kwargs + ) + self.job_type: str = "Sweep" self.component_configuration = component_configuration self.early_termination = early_termination self.inputs = inputs @@ -29424,8 +29271,8 @@ class SweepJobLimits(JobLimits): All required parameters must be populated in order to send to Azure. - :ivar job_limits_type: Required. [Required] JobLimit type.Constant filled by server. Possible - values include: "Command", "Sweep". + :ivar job_limits_type: [Required] JobLimit type. Required. Known values are: "Command" and + "Sweep". :vartype job_limits_type: str or ~azure.mgmt.machinelearningservices.models.JobLimitsType :ivar timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. Only supports duration with precision as low as Seconds. 
@@ -29439,15 +29286,15 @@ class SweepJobLimits(JobLimits): """ _validation = { - 'job_limits_type': {'required': True}, + "job_limits_type": {"required": True}, } _attribute_map = { - 'job_limits_type': {'key': 'jobLimitsType', 'type': 'str'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, - 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, - 'max_total_trials': {'key': 'maxTotalTrials', 'type': 'int'}, - 'trial_timeout': {'key': 'trialTimeout', 'type': 'duration'}, + "job_limits_type": {"key": "jobLimitsType", "type": "str"}, + "timeout": {"key": "timeout", "type": "duration"}, + "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, + "max_total_trials": {"key": "maxTotalTrials", "type": "int"}, + "trial_timeout": {"key": "trialTimeout", "type": "duration"}, } def __init__( @@ -29457,8 +29304,8 @@ def __init__( max_concurrent_trials: Optional[int] = None, max_total_trials: Optional[int] = None, trial_timeout: Optional[datetime.timedelta] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword timeout: The max run duration in ISO 8601 format, after which the job will be cancelled. Only supports duration with precision as low as Seconds. @@ -29470,29 +29317,29 @@ def __init__( :keyword trial_timeout: Sweep Job Trial timeout value. :paramtype trial_timeout: ~datetime.timedelta """ - super(SweepJobLimits, self).__init__(timeout=timeout, **kwargs) - self.job_limits_type = 'Sweep' # type: str + super().__init__(timeout=timeout, **kwargs) + self.job_limits_type: str = "Sweep" self.max_concurrent_trials = max_concurrent_trials self.max_total_trials = max_total_trials self.trial_timeout = trial_timeout -class SynapseSpark(Compute): +class SynapseSpark(Compute): # pylint: disable=too-many-instance-attributes """A SynapseSpark compute. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType :ivar compute_location: Location for the underlying compute. :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ProvisioningState :ivar description: The description of the Machine Learning compute. 
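A short sketch of the regenerated SweepJobLimits constructor from the hunk above, assuming the same model namespace; all values are illustrative:

    # Sketch only: duration fields take datetime.timedelta and serialize as ISO 8601 durations.
    import datetime
    from azure.mgmt.machinelearningservices import models as _models

    limits = _models.SweepJobLimits(
        max_total_trials=20,
        max_concurrent_trials=4,
        trial_timeout=datetime.timedelta(minutes=30),
        timeout=datetime.timedelta(hours=6),  # whole-sweep timeout, seconds precision
    )
    assert limits.job_limits_type == "Sweep"  # discriminator set by the subclass
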
@@ -29516,26 +29363,26 @@ class SynapseSpark(Compute): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, - 'properties': {'key': 'properties', 'type': 'SynapseSparkProperties'}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, + "properties": {"key": "properties", "type": "SynapseSparkProperties"}, } def __init__( @@ -29545,9 +29392,9 @@ def __init__( description: Optional[str] = None, resource_id: Optional[str] = None, disable_local_auth: Optional[bool] = None, - properties: Optional["SynapseSparkProperties"] = None, - **kwargs - ): + properties: Optional["_models.SynapseSparkProperties"] = None, + **kwargs: Any + ) -> None: """ :keyword compute_location: Location for the underlying compute. :paramtype compute_location: str @@ -29561,12 +29408,18 @@ def __init__( :keyword properties: :paramtype properties: ~azure.mgmt.machinelearningservices.models.SynapseSparkProperties """ - super(SynapseSpark, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, **kwargs) - self.compute_type = 'SynapseSpark' # type: str + super().__init__( + compute_location=compute_location, + description=description, + resource_id=resource_id, + disable_local_auth=disable_local_auth, + **kwargs + ) + self.compute_type: str = "SynapseSpark" self.properties = properties -class SynapseSparkProperties(msrest.serialization.Model): +class SynapseSparkProperties(_serialization.Model): """SynapseSparkProperties. :ivar auto_scale_properties: Auto scale properties. 
@@ -29592,23 +29445,23 @@ class SynapseSparkProperties(msrest.serialization.Model): """ _attribute_map = { - 'auto_scale_properties': {'key': 'autoScaleProperties', 'type': 'AutoScaleProperties'}, - 'auto_pause_properties': {'key': 'autoPauseProperties', 'type': 'AutoPauseProperties'}, - 'spark_version': {'key': 'sparkVersion', 'type': 'str'}, - 'node_count': {'key': 'nodeCount', 'type': 'int'}, - 'node_size': {'key': 'nodeSize', 'type': 'str'}, - 'node_size_family': {'key': 'nodeSizeFamily', 'type': 'str'}, - 'subscription_id': {'key': 'subscriptionId', 'type': 'str'}, - 'resource_group': {'key': 'resourceGroup', 'type': 'str'}, - 'workspace_name': {'key': 'workspaceName', 'type': 'str'}, - 'pool_name': {'key': 'poolName', 'type': 'str'}, + "auto_scale_properties": {"key": "autoScaleProperties", "type": "AutoScaleProperties"}, + "auto_pause_properties": {"key": "autoPauseProperties", "type": "AutoPauseProperties"}, + "spark_version": {"key": "sparkVersion", "type": "str"}, + "node_count": {"key": "nodeCount", "type": "int"}, + "node_size": {"key": "nodeSize", "type": "str"}, + "node_size_family": {"key": "nodeSizeFamily", "type": "str"}, + "subscription_id": {"key": "subscriptionId", "type": "str"}, + "resource_group": {"key": "resourceGroup", "type": "str"}, + "workspace_name": {"key": "workspaceName", "type": "str"}, + "pool_name": {"key": "poolName", "type": "str"}, } def __init__( self, *, - auto_scale_properties: Optional["AutoScaleProperties"] = None, - auto_pause_properties: Optional["AutoPauseProperties"] = None, + auto_scale_properties: Optional["_models.AutoScaleProperties"] = None, + auto_pause_properties: Optional["_models.AutoPauseProperties"] = None, spark_version: Optional[str] = None, node_count: Optional[int] = None, node_size: Optional[str] = None, @@ -29617,8 +29470,8 @@ def __init__( resource_group: Optional[str] = None, workspace_name: Optional[str] = None, pool_name: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword auto_scale_properties: Auto scale properties. :paramtype auto_scale_properties: @@ -29643,7 +29496,7 @@ def __init__( :keyword pool_name: Pool name. :paramtype pool_name: str """ - super(SynapseSparkProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.auto_scale_properties = auto_scale_properties self.auto_pause_properties = auto_pause_properties self.spark_version = spark_version @@ -29656,7 +29509,7 @@ def __init__( self.pool_name = pool_name -class SystemCreatedAcrAccount(msrest.serialization.Model): +class SystemCreatedAcrAccount(_serialization.Model): """SystemCreatedAcrAccount. :ivar acr_account_name: Name of the ACR account. @@ -29668,9 +29521,9 @@ class SystemCreatedAcrAccount(msrest.serialization.Model): """ _attribute_map = { - 'acr_account_name': {'key': 'acrAccountName', 'type': 'str'}, - 'acr_account_sku': {'key': 'acrAccountSku', 'type': 'str'}, - 'arm_resource_id': {'key': 'armResourceId', 'type': 'ArmResourceId'}, + "acr_account_name": {"key": "acrAccountName", "type": "str"}, + "acr_account_sku": {"key": "acrAccountSku", "type": "str"}, + "arm_resource_id": {"key": "armResourceId", "type": "ArmResourceId"}, } def __init__( @@ -29678,9 +29531,9 @@ def __init__( *, acr_account_name: Optional[str] = None, acr_account_sku: Optional[str] = None, - arm_resource_id: Optional["ArmResourceId"] = None, - **kwargs - ): + arm_resource_id: Optional["_models.ArmResourceId"] = None, + **kwargs: Any + ) -> None: """ :keyword acr_account_name: Name of the ACR account. 
:paramtype acr_account_name: str @@ -29689,13 +29542,13 @@ def __init__( :keyword arm_resource_id: This is populated once the ACR account is created. :paramtype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId """ - super(SystemCreatedAcrAccount, self).__init__(**kwargs) + super().__init__(**kwargs) self.acr_account_name = acr_account_name self.acr_account_sku = acr_account_sku self.arm_resource_id = arm_resource_id -class SystemCreatedStorageAccount(msrest.serialization.Model): +class SystemCreatedStorageAccount(_serialization.Model): """SystemCreatedStorageAccount. :ivar allow_blob_public_access: Public blob access allowed. @@ -29719,23 +29572,23 @@ class SystemCreatedStorageAccount(msrest.serialization.Model): """ _attribute_map = { - 'allow_blob_public_access': {'key': 'allowBlobPublicAccess', 'type': 'bool'}, - 'arm_resource_id': {'key': 'armResourceId', 'type': 'ArmResourceId'}, - 'storage_account_hns_enabled': {'key': 'storageAccountHnsEnabled', 'type': 'bool'}, - 'storage_account_name': {'key': 'storageAccountName', 'type': 'str'}, - 'storage_account_type': {'key': 'storageAccountType', 'type': 'str'}, + "allow_blob_public_access": {"key": "allowBlobPublicAccess", "type": "bool"}, + "arm_resource_id": {"key": "armResourceId", "type": "ArmResourceId"}, + "storage_account_hns_enabled": {"key": "storageAccountHnsEnabled", "type": "bool"}, + "storage_account_name": {"key": "storageAccountName", "type": "str"}, + "storage_account_type": {"key": "storageAccountType", "type": "str"}, } def __init__( self, *, allow_blob_public_access: Optional[bool] = None, - arm_resource_id: Optional["ArmResourceId"] = None, + arm_resource_id: Optional["_models.ArmResourceId"] = None, storage_account_hns_enabled: Optional[bool] = None, storage_account_name: Optional[str] = None, storage_account_type: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword allow_blob_public_access: Public blob access allowed. :paramtype allow_blob_public_access: bool @@ -29756,7 +29609,7 @@ def __init__( "Premium_ZRS". :paramtype storage_account_type: str """ - super(SystemCreatedStorageAccount, self).__init__(**kwargs) + super().__init__(**kwargs) self.allow_blob_public_access = allow_blob_public_access self.arm_resource_id = arm_resource_id self.storage_account_hns_enabled = storage_account_hns_enabled @@ -29764,63 +29617,63 @@ def __init__( self.storage_account_type = storage_account_type -class SystemData(msrest.serialization.Model): +class SystemData(_serialization.Model): """Metadata pertaining to creation and last modification of the resource. :ivar created_by: The identity that created the resource. :vartype created_by: str - :ivar created_by_type: The type of identity that created the resource. Possible values include: - "User", "Application", "ManagedIdentity", "Key". + :ivar created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". :vartype created_by_type: str or ~azure.mgmt.machinelearningservices.models.CreatedByType :ivar created_at: The timestamp of resource creation (UTC). :vartype created_at: ~datetime.datetime :ivar last_modified_by: The identity that last modified the resource. :vartype last_modified_by: str - :ivar last_modified_by_type: The type of identity that last modified the resource. Possible - values include: "User", "Application", "ManagedIdentity", "Key". + :ivar last_modified_by_type: The type of identity that last modified the resource. 
Known values + are: "User", "Application", "ManagedIdentity", and "Key". :vartype last_modified_by_type: str or ~azure.mgmt.machinelearningservices.models.CreatedByType :ivar last_modified_at: The timestamp of resource last modification (UTC). :vartype last_modified_at: ~datetime.datetime """ _attribute_map = { - 'created_by': {'key': 'createdBy', 'type': 'str'}, - 'created_by_type': {'key': 'createdByType', 'type': 'str'}, - 'created_at': {'key': 'createdAt', 'type': 'iso-8601'}, - 'last_modified_by': {'key': 'lastModifiedBy', 'type': 'str'}, - 'last_modified_by_type': {'key': 'lastModifiedByType', 'type': 'str'}, - 'last_modified_at': {'key': 'lastModifiedAt', 'type': 'iso-8601'}, + "created_by": {"key": "createdBy", "type": "str"}, + "created_by_type": {"key": "createdByType", "type": "str"}, + "created_at": {"key": "createdAt", "type": "iso-8601"}, + "last_modified_by": {"key": "lastModifiedBy", "type": "str"}, + "last_modified_by_type": {"key": "lastModifiedByType", "type": "str"}, + "last_modified_at": {"key": "lastModifiedAt", "type": "iso-8601"}, } def __init__( self, *, created_by: Optional[str] = None, - created_by_type: Optional[Union[str, "CreatedByType"]] = None, + created_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, created_at: Optional[datetime.datetime] = None, last_modified_by: Optional[str] = None, - last_modified_by_type: Optional[Union[str, "CreatedByType"]] = None, + last_modified_by_type: Optional[Union[str, "_models.CreatedByType"]] = None, last_modified_at: Optional[datetime.datetime] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword created_by: The identity that created the resource. :paramtype created_by: str - :keyword created_by_type: The type of identity that created the resource. Possible values - include: "User", "Application", "ManagedIdentity", "Key". + :keyword created_by_type: The type of identity that created the resource. Known values are: + "User", "Application", "ManagedIdentity", and "Key". :paramtype created_by_type: str or ~azure.mgmt.machinelearningservices.models.CreatedByType :keyword created_at: The timestamp of resource creation (UTC). :paramtype created_at: ~datetime.datetime :keyword last_modified_by: The identity that last modified the resource. :paramtype last_modified_by: str - :keyword last_modified_by_type: The type of identity that last modified the resource. Possible - values include: "User", "Application", "ManagedIdentity", "Key". + :keyword last_modified_by_type: The type of identity that last modified the resource. Known + values are: "User", "Application", "ManagedIdentity", and "Key". :paramtype last_modified_by_type: str or ~azure.mgmt.machinelearningservices.models.CreatedByType :keyword last_modified_at: The timestamp of resource last modification (UTC). :paramtype last_modified_at: ~datetime.datetime """ - super(SystemData, self).__init__(**kwargs) + super().__init__(**kwargs) self.created_by = created_by self.created_by_type = created_by_type self.created_at = created_at @@ -29829,7 +29682,7 @@ def __init__( self.last_modified_at = last_modified_at -class SystemService(msrest.serialization.Model): +class SystemService(_serialization.Model): """A system service running on a compute. Variables are only populated by the server, and will be ignored when sending a request. 
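To illustrate the SystemData metadata model defined above, a hedged construction sketch. Plain strings are used for the *_by_type fields, since they accept either a string or the CreatedByType enum; the import path is again an assumption.

# Sketch only: import path assumed; the *_by_type fields accept plain strings
# from the documented set ("User", "Application", "ManagedIdentity", "Key").
import datetime
from azure.mgmt.machinelearningservices import models as _models

system_data = _models.SystemData(
    created_by="alice@contoso.com",
    created_by_type="User",
    created_at=datetime.datetime(2023, 8, 1, 12, 0, tzinfo=datetime.timezone.utc),
    last_modified_by="automation-app",
    last_modified_by_type="Application",
    last_modified_at=datetime.datetime(2023, 8, 2, 9, 30, tzinfo=datetime.timezone.utc),
)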
@@ -29843,30 +29696,26 @@ class SystemService(msrest.serialization.Model): """ _validation = { - 'system_service_type': {'readonly': True}, - 'public_ip_address': {'readonly': True}, - 'version': {'readonly': True}, + "system_service_type": {"readonly": True}, + "public_ip_address": {"readonly": True}, + "version": {"readonly": True}, } _attribute_map = { - 'system_service_type': {'key': 'systemServiceType', 'type': 'str'}, - 'public_ip_address': {'key': 'publicIpAddress', 'type': 'str'}, - 'version': {'key': 'version', 'type': 'str'}, + "system_service_type": {"key": "systemServiceType", "type": "str"}, + "public_ip_address": {"key": "publicIpAddress", "type": "str"}, + "version": {"key": "version", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(SystemService, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.system_service_type = None self.public_ip_address = None self.version = None -class TableFixedParameters(msrest.serialization.Model): +class TableFixedParameters(_serialization.Model): # pylint: disable=too-many-instance-attributes """Fixed training parameters that won't be swept over during AutoML Table training. :ivar booster: Specify the boosting type, e.g gbdt for XGBoost. @@ -29914,26 +29763,26 @@ class TableFixedParameters(msrest.serialization.Model): """ _attribute_map = { - 'booster': {'key': 'booster', 'type': 'str'}, - 'boosting_type': {'key': 'boostingType', 'type': 'str'}, - 'grow_policy': {'key': 'growPolicy', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'float'}, - 'max_bin': {'key': 'maxBin', 'type': 'int'}, - 'max_depth': {'key': 'maxDepth', 'type': 'int'}, - 'max_leaves': {'key': 'maxLeaves', 'type': 'int'}, - 'min_data_in_leaf': {'key': 'minDataInLeaf', 'type': 'int'}, - 'min_split_gain': {'key': 'minSplitGain', 'type': 'float'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'n_estimators': {'key': 'nEstimators', 'type': 'int'}, - 'num_leaves': {'key': 'numLeaves', 'type': 'int'}, - 'preprocessor_name': {'key': 'preprocessorName', 'type': 'str'}, - 'reg_alpha': {'key': 'regAlpha', 'type': 'float'}, - 'reg_lambda': {'key': 'regLambda', 'type': 'float'}, - 'subsample': {'key': 'subsample', 'type': 'float'}, - 'subsample_freq': {'key': 'subsampleFreq', 'type': 'float'}, - 'tree_method': {'key': 'treeMethod', 'type': 'str'}, - 'with_mean': {'key': 'withMean', 'type': 'bool'}, - 'with_std': {'key': 'withStd', 'type': 'bool'}, + "booster": {"key": "booster", "type": "str"}, + "boosting_type": {"key": "boostingType", "type": "str"}, + "grow_policy": {"key": "growPolicy", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "float"}, + "max_bin": {"key": "maxBin", "type": "int"}, + "max_depth": {"key": "maxDepth", "type": "int"}, + "max_leaves": {"key": "maxLeaves", "type": "int"}, + "min_data_in_leaf": {"key": "minDataInLeaf", "type": "int"}, + "min_split_gain": {"key": "minSplitGain", "type": "float"}, + "model_name": {"key": "modelName", "type": "str"}, + "n_estimators": {"key": "nEstimators", "type": "int"}, + "num_leaves": {"key": "numLeaves", "type": "int"}, + "preprocessor_name": {"key": "preprocessorName", "type": "str"}, + "reg_alpha": {"key": "regAlpha", "type": "float"}, + "reg_lambda": {"key": "regLambda", "type": "float"}, + "subsample": {"key": "subsample", "type": "float"}, + "subsample_freq": {"key": "subsampleFreq", "type": "float"}, + "tree_method": {"key": "treeMethod", "type": "str"}, + "with_mean": {"key": 
"withMean", "type": "bool"}, + "with_std": {"key": "withStd", "type": "bool"}, } def __init__( @@ -29957,10 +29806,10 @@ def __init__( subsample: Optional[float] = None, subsample_freq: Optional[float] = None, tree_method: Optional[str] = None, - with_mean: Optional[bool] = False, - with_std: Optional[bool] = False, - **kwargs - ): + with_mean: bool = False, + with_std: bool = False, + **kwargs: Any + ) -> None: """ :keyword booster: Specify the boosting type, e.g gbdt for XGBoost. :paramtype booster: str @@ -30005,7 +29854,7 @@ def __init__( :keyword with_std: If true, scaling the data with Unit Variance with StandardScalar. :paramtype with_std: bool """ - super(TableFixedParameters, self).__init__(**kwargs) + super().__init__(**kwargs) self.booster = booster self.boosting_type = boosting_type self.grow_policy = grow_policy @@ -30028,7 +29877,7 @@ def __init__( self.with_std = with_std -class TableParameterSubspace(msrest.serialization.Model): +class TableParameterSubspace(_serialization.Model): # pylint: disable=too-many-instance-attributes """TableParameterSubspace. :ivar booster: Specify the boosting type, e.g gbdt for XGBoost. @@ -30076,26 +29925,26 @@ class TableParameterSubspace(msrest.serialization.Model): """ _attribute_map = { - 'booster': {'key': 'booster', 'type': 'str'}, - 'boosting_type': {'key': 'boostingType', 'type': 'str'}, - 'grow_policy': {'key': 'growPolicy', 'type': 'str'}, - 'learning_rate': {'key': 'learningRate', 'type': 'str'}, - 'max_bin': {'key': 'maxBin', 'type': 'str'}, - 'max_depth': {'key': 'maxDepth', 'type': 'str'}, - 'max_leaves': {'key': 'maxLeaves', 'type': 'str'}, - 'min_data_in_leaf': {'key': 'minDataInLeaf', 'type': 'str'}, - 'min_split_gain': {'key': 'minSplitGain', 'type': 'str'}, - 'model_name': {'key': 'modelName', 'type': 'str'}, - 'n_estimators': {'key': 'nEstimators', 'type': 'str'}, - 'num_leaves': {'key': 'numLeaves', 'type': 'str'}, - 'preprocessor_name': {'key': 'preprocessorName', 'type': 'str'}, - 'reg_alpha': {'key': 'regAlpha', 'type': 'str'}, - 'reg_lambda': {'key': 'regLambda', 'type': 'str'}, - 'subsample': {'key': 'subsample', 'type': 'str'}, - 'subsample_freq': {'key': 'subsampleFreq', 'type': 'str'}, - 'tree_method': {'key': 'treeMethod', 'type': 'str'}, - 'with_mean': {'key': 'withMean', 'type': 'str'}, - 'with_std': {'key': 'withStd', 'type': 'str'}, + "booster": {"key": "booster", "type": "str"}, + "boosting_type": {"key": "boostingType", "type": "str"}, + "grow_policy": {"key": "growPolicy", "type": "str"}, + "learning_rate": {"key": "learningRate", "type": "str"}, + "max_bin": {"key": "maxBin", "type": "str"}, + "max_depth": {"key": "maxDepth", "type": "str"}, + "max_leaves": {"key": "maxLeaves", "type": "str"}, + "min_data_in_leaf": {"key": "minDataInLeaf", "type": "str"}, + "min_split_gain": {"key": "minSplitGain", "type": "str"}, + "model_name": {"key": "modelName", "type": "str"}, + "n_estimators": {"key": "nEstimators", "type": "str"}, + "num_leaves": {"key": "numLeaves", "type": "str"}, + "preprocessor_name": {"key": "preprocessorName", "type": "str"}, + "reg_alpha": {"key": "regAlpha", "type": "str"}, + "reg_lambda": {"key": "regLambda", "type": "str"}, + "subsample": {"key": "subsample", "type": "str"}, + "subsample_freq": {"key": "subsampleFreq", "type": "str"}, + "tree_method": {"key": "treeMethod", "type": "str"}, + "with_mean": {"key": "withMean", "type": "str"}, + "with_std": {"key": "withStd", "type": "str"}, } def __init__( @@ -30121,8 +29970,8 @@ def __init__( tree_method: Optional[str] = None, with_mean: 
Optional[str] = None, with_std: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword booster: Specify the boosting type, e.g gbdt for XGBoost. :paramtype booster: str @@ -30167,7 +30016,7 @@ def __init__( :keyword with_std: If true, scaling the data with Unit Variance with StandardScalar. :paramtype with_std: str """ - super(TableParameterSubspace, self).__init__(**kwargs) + super().__init__(**kwargs) self.booster = booster self.boosting_type = boosting_type self.grow_policy = grow_policy @@ -30190,44 +30039,44 @@ def __init__( self.with_std = with_std -class TableSweepSettings(msrest.serialization.Model): +class TableSweepSettings(_serialization.Model): """TableSweepSettings. All required parameters must be populated in order to send to Azure. :ivar early_termination: Type of early termination policy for the sweeping job. :vartype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :ivar sampling_algorithm: Required. [Required] Type of sampling algorithm. Possible values - include: "Grid", "Random", "Bayesian". + :ivar sampling_algorithm: [Required] Type of sampling algorithm. Required. Known values are: + "Grid", "Random", and "Bayesian". :vartype sampling_algorithm: str or ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ _validation = { - 'sampling_algorithm': {'required': True}, + "sampling_algorithm": {"required": True}, } _attribute_map = { - 'early_termination': {'key': 'earlyTermination', 'type': 'EarlyTerminationPolicy'}, - 'sampling_algorithm': {'key': 'samplingAlgorithm', 'type': 'str'}, + "early_termination": {"key": "earlyTermination", "type": "EarlyTerminationPolicy"}, + "sampling_algorithm": {"key": "samplingAlgorithm", "type": "str"}, } def __init__( self, *, - sampling_algorithm: Union[str, "SamplingAlgorithmType"], - early_termination: Optional["EarlyTerminationPolicy"] = None, - **kwargs - ): + sampling_algorithm: Union[str, "_models.SamplingAlgorithmType"], + early_termination: Optional["_models.EarlyTerminationPolicy"] = None, + **kwargs: Any + ) -> None: """ :keyword early_termination: Type of early termination policy for the sweeping job. :paramtype early_termination: ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicy - :keyword sampling_algorithm: Required. [Required] Type of sampling algorithm. Possible values - include: "Grid", "Random", "Bayesian". + :keyword sampling_algorithm: [Required] Type of sampling algorithm. Required. Known values are: + "Grid", "Random", and "Bayesian". :paramtype sampling_algorithm: str or ~azure.mgmt.machinelearningservices.models.SamplingAlgorithmType """ - super(TableSweepSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.early_termination = early_termination self.sampling_algorithm = sampling_algorithm @@ -30250,7 +30099,7 @@ class TableVerticalFeaturizationSettings(FeaturizationSettings): care of necessary transformation of the data in featurization phase. If 'Off' is selected then no featurization is done. If 'Custom' is selected then user can specify additional inputs to customize how featurization - is done. Possible values include: "Auto", "Custom", "Off". + is done. Known values are: "Auto", "Custom", and "Off". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.FeaturizationMode :ivar transformer_params: User can specify additional transformers to be used along with the columns to which it would be applied and parameters for the transformer constructor. 
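A sketch tying together the table-sweep models defined above (TableFixedParameters, TableParameterSubspace, TableSweepSettings). The search-expression strings in the subspace are illustrative placeholders rather than a grammar guaranteed by this spec, and the import path is assumed as in the earlier sketches.

# Sketch only: import path assumed; subspace strings are illustrative placeholders.
from azure.mgmt.machinelearningservices import models as _models

fixed = _models.TableFixedParameters(booster="gbdt", learning_rate=0.1, n_estimators=200)
subspace = _models.TableParameterSubspace(
    max_depth="choice(4,6,8)",            # all subspace fields are strings in this model
    learning_rate="uniform(0.01,0.3)",
)
sweep = _models.TableSweepSettings(sampling_algorithm="Random")   # "Grid" and "Bayesian" are also accepted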
@@ -30259,25 +30108,25 @@ class TableVerticalFeaturizationSettings(FeaturizationSettings): """ _attribute_map = { - 'dataset_language': {'key': 'datasetLanguage', 'type': 'str'}, - 'blocked_transformers': {'key': 'blockedTransformers', 'type': '[str]'}, - 'column_name_and_types': {'key': 'columnNameAndTypes', 'type': '{str}'}, - 'enable_dnn_featurization': {'key': 'enableDnnFeaturization', 'type': 'bool'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'transformer_params': {'key': 'transformerParams', 'type': '{[ColumnTransformer]}'}, + "dataset_language": {"key": "datasetLanguage", "type": "str"}, + "blocked_transformers": {"key": "blockedTransformers", "type": "[str]"}, + "column_name_and_types": {"key": "columnNameAndTypes", "type": "{str}"}, + "enable_dnn_featurization": {"key": "enableDnnFeaturization", "type": "bool"}, + "mode": {"key": "mode", "type": "str"}, + "transformer_params": {"key": "transformerParams", "type": "{[ColumnTransformer]}"}, } def __init__( self, *, dataset_language: Optional[str] = None, - blocked_transformers: Optional[List[Union[str, "BlockedTransformers"]]] = None, + blocked_transformers: Optional[List[Union[str, "_models.BlockedTransformers"]]] = None, column_name_and_types: Optional[Dict[str, str]] = None, - enable_dnn_featurization: Optional[bool] = False, - mode: Optional[Union[str, "FeaturizationMode"]] = None, - transformer_params: Optional[Dict[str, List["ColumnTransformer"]]] = None, - **kwargs - ): + enable_dnn_featurization: bool = False, + mode: Optional[Union[str, "_models.FeaturizationMode"]] = None, + transformer_params: Optional[Dict[str, List["_models.ColumnTransformer"]]] = None, + **kwargs: Any + ) -> None: """ :keyword dataset_language: Dataset language, useful for the text data. :paramtype dataset_language: str @@ -30294,14 +30143,14 @@ def __init__( care of necessary transformation of the data in featurization phase. If 'Off' is selected then no featurization is done. If 'Custom' is selected then user can specify additional inputs to customize how featurization - is done. Possible values include: "Auto", "Custom", "Off". + is done. Known values are: "Auto", "Custom", and "Off". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.FeaturizationMode :keyword transformer_params: User can specify additional transformers to be used along with the columns to which it would be applied and parameters for the transformer constructor. :paramtype transformer_params: dict[str, list[~azure.mgmt.machinelearningservices.models.ColumnTransformer]] """ - super(TableVerticalFeaturizationSettings, self).__init__(dataset_language=dataset_language, **kwargs) + super().__init__(dataset_language=dataset_language, **kwargs) self.blocked_transformers = blocked_transformers self.column_name_and_types = column_name_and_types self.enable_dnn_featurization = enable_dnn_featurization @@ -30309,7 +30158,7 @@ def __init__( self.transformer_params = transformer_params -class TableVerticalLimitSettings(msrest.serialization.Model): +class TableVerticalLimitSettings(_serialization.Model): """Job execution constraints. 
:ivar enable_early_termination: Enable early termination, determines whether or not if @@ -30336,33 +30185,33 @@ class TableVerticalLimitSettings(msrest.serialization.Model): """ _attribute_map = { - 'enable_early_termination': {'key': 'enableEarlyTermination', 'type': 'bool'}, - 'exit_score': {'key': 'exitScore', 'type': 'float'}, - 'max_concurrent_trials': {'key': 'maxConcurrentTrials', 'type': 'int'}, - 'max_cores_per_trial': {'key': 'maxCoresPerTrial', 'type': 'int'}, - 'max_nodes': {'key': 'maxNodes', 'type': 'int'}, - 'max_trials': {'key': 'maxTrials', 'type': 'int'}, - 'sweep_concurrent_trials': {'key': 'sweepConcurrentTrials', 'type': 'int'}, - 'sweep_trials': {'key': 'sweepTrials', 'type': 'int'}, - 'timeout': {'key': 'timeout', 'type': 'duration'}, - 'trial_timeout': {'key': 'trialTimeout', 'type': 'duration'}, + "enable_early_termination": {"key": "enableEarlyTermination", "type": "bool"}, + "exit_score": {"key": "exitScore", "type": "float"}, + "max_concurrent_trials": {"key": "maxConcurrentTrials", "type": "int"}, + "max_cores_per_trial": {"key": "maxCoresPerTrial", "type": "int"}, + "max_nodes": {"key": "maxNodes", "type": "int"}, + "max_trials": {"key": "maxTrials", "type": "int"}, + "sweep_concurrent_trials": {"key": "sweepConcurrentTrials", "type": "int"}, + "sweep_trials": {"key": "sweepTrials", "type": "int"}, + "timeout": {"key": "timeout", "type": "duration"}, + "trial_timeout": {"key": "trialTimeout", "type": "duration"}, } def __init__( self, *, - enable_early_termination: Optional[bool] = True, + enable_early_termination: bool = True, exit_score: Optional[float] = None, - max_concurrent_trials: Optional[int] = 1, - max_cores_per_trial: Optional[int] = -1, - max_nodes: Optional[int] = 1, - max_trials: Optional[int] = 1000, - sweep_concurrent_trials: Optional[int] = 0, - sweep_trials: Optional[int] = 0, - timeout: Optional[datetime.timedelta] = "PT6H", - trial_timeout: Optional[datetime.timedelta] = "PT30M", - **kwargs - ): + max_concurrent_trials: int = 1, + max_cores_per_trial: int = -1, + max_nodes: int = 1, + max_trials: int = 1000, + sweep_concurrent_trials: int = 0, + sweep_trials: int = 0, + timeout: datetime.timedelta = "PT6H", + trial_timeout: datetime.timedelta = "PT30M", + **kwargs: Any + ) -> None: """ :keyword enable_early_termination: Enable early termination, determines whether or not if AutoMLJob will terminate early if there is no score improvement in last 20 iterations. @@ -30387,7 +30236,7 @@ def __init__( :keyword trial_timeout: Iteration timeout. :paramtype trial_timeout: ~datetime.timedelta """ - super(TableVerticalLimitSettings, self).__init__(**kwargs) + super().__init__(**kwargs) self.enable_early_termination = enable_early_termination self.exit_score = exit_score self.max_concurrent_trials = max_concurrent_trials @@ -30405,8 +30254,8 @@ class TargetUtilizationScaleSettings(OnlineScaleSettings): All required parameters must be populated in order to send to Azure. - :ivar scale_type: Required. [Required] Type of deployment scaling algorithm.Constant filled by - server. Possible values include: "Default", "TargetUtilization". + :ivar scale_type: [Required] Type of deployment scaling algorithm. Required. Known values are: + "Default" and "TargetUtilization". :vartype scale_type: str or ~azure.mgmt.machinelearningservices.models.ScaleType :ivar max_instances: The maximum number of instances that the deployment can scale to. The quota will be reserved for max_instances. 
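A construction sketch for the AutoML table featurization and limit models defined above. Timedelta values are passed for the duration-typed fields, and the import path is assumed as before.

# Sketch only: import path assumed from the docstring cross-references.
import datetime
from azure.mgmt.machinelearningservices import models as _models

featurization = _models.TableVerticalFeaturizationSettings(mode="Auto", enable_dnn_featurization=False)
limits = _models.TableVerticalLimitSettings(
    enable_early_termination=True,
    max_trials=250,
    max_concurrent_trials=4,
    max_nodes=1,
    timeout=datetime.timedelta(hours=6),           # matches the PT6H default in the signature
    trial_timeout=datetime.timedelta(minutes=30),  # matches the PT30M default in the signature
)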
@@ -30421,26 +30270,26 @@ class TargetUtilizationScaleSettings(OnlineScaleSettings): """ _validation = { - 'scale_type': {'required': True}, + "scale_type": {"required": True}, } _attribute_map = { - 'scale_type': {'key': 'scaleType', 'type': 'str'}, - 'max_instances': {'key': 'maxInstances', 'type': 'int'}, - 'min_instances': {'key': 'minInstances', 'type': 'int'}, - 'polling_interval': {'key': 'pollingInterval', 'type': 'duration'}, - 'target_utilization_percentage': {'key': 'targetUtilizationPercentage', 'type': 'int'}, + "scale_type": {"key": "scaleType", "type": "str"}, + "max_instances": {"key": "maxInstances", "type": "int"}, + "min_instances": {"key": "minInstances", "type": "int"}, + "polling_interval": {"key": "pollingInterval", "type": "duration"}, + "target_utilization_percentage": {"key": "targetUtilizationPercentage", "type": "int"}, } def __init__( self, *, - max_instances: Optional[int] = 1, - min_instances: Optional[int] = 1, - polling_interval: Optional[datetime.timedelta] = "PT1S", - target_utilization_percentage: Optional[int] = 70, - **kwargs - ): + max_instances: int = 1, + min_instances: int = 1, + polling_interval: datetime.timedelta = "PT1S", + target_utilization_percentage: int = 70, + **kwargs: Any + ) -> None: """ :keyword max_instances: The maximum number of instances that the deployment can scale to. The quota will be reserved for max_instances. @@ -30453,8 +30302,8 @@ def __init__( :keyword target_utilization_percentage: Target CPU usage for the autoscaler. :paramtype target_utilization_percentage: int """ - super(TargetUtilizationScaleSettings, self).__init__(**kwargs) - self.scale_type = 'TargetUtilization' # type: str + super().__init__(**kwargs) + self.scale_type: str = "TargetUtilization" self.max_instances = max_instances self.min_instances = min_instances self.polling_interval = polling_interval @@ -30466,9 +30315,8 @@ class TensorFlow(DistributionConfiguration): All required parameters must be populated in order to send to Azure. - :ivar distribution_type: Required. [Required] Specifies the type of distribution - framework.Constant filled by server. Possible values include: "PyTorch", "TensorFlow", "Mpi", - "Ray". + :ivar distribution_type: [Required] Specifies the type of distribution framework. Required. + Known values are: "PyTorch", "TensorFlow", "Mpi", and "Ray". :vartype distribution_type: str or ~azure.mgmt.machinelearningservices.models.DistributionType :ivar parameter_server_count: Number of parameter server tasks. :vartype parameter_server_count: int @@ -30477,40 +30325,47 @@ class TensorFlow(DistributionConfiguration): """ _validation = { - 'distribution_type': {'required': True}, + "distribution_type": {"required": True}, } _attribute_map = { - 'distribution_type': {'key': 'distributionType', 'type': 'str'}, - 'parameter_server_count': {'key': 'parameterServerCount', 'type': 'int'}, - 'worker_count': {'key': 'workerCount', 'type': 'int'}, + "distribution_type": {"key": "distributionType", "type": "str"}, + "parameter_server_count": {"key": "parameterServerCount", "type": "int"}, + "worker_count": {"key": "workerCount", "type": "int"}, } - def __init__( - self, - *, - parameter_server_count: Optional[int] = 0, - worker_count: Optional[int] = None, - **kwargs - ): + def __init__(self, *, parameter_server_count: int = 0, worker_count: Optional[int] = None, **kwargs: Any) -> None: """ :keyword parameter_server_count: Number of parameter server tasks. :paramtype parameter_server_count: int :keyword worker_count: Number of workers. 
If not specified, will default to the instance count. :paramtype worker_count: int """ - super(TensorFlow, self).__init__(**kwargs) - self.distribution_type = 'TensorFlow' # type: str + super().__init__(**kwargs) + self.distribution_type: str = "TensorFlow" self.parameter_server_count = parameter_server_count self.worker_count = worker_count -class TextClassification(AutoMLVertical, NlpVertical): +class TextClassification(NlpVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes """Text Classification task in AutoML NLP vertical. -NLP - Natural Language Processing. + NLP - Natural Language Processing. All required parameters must be populated in order to send to Azure. + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar featurization_settings: Featurization inputs needed for AutoML job. :vartype featurization_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings @@ -30526,61 +30381,56 @@ class TextClassification(AutoMLVertical, NlpVertical): :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings :ivar validation_data: Validation data inputs. :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar primary_metric: Primary metric for Text-Classification task. Possible values include: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + :ivar primary_metric: Primary metric for Text-Classification task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and "PrecisionScoreWeighted". 
:vartype primary_metric: str or ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics """ _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, } _attribute_map = { - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'NlpFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, - 'search_space': {'key': 'searchSpace', 'type': '[NlpParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'NlpSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } def __init__( self, *, - training_data: "MLTableJobInput", - featurization_settings: Optional["NlpVerticalFeaturizationSettings"] = None, - fixed_parameters: Optional["NlpFixedParameters"] = None, - limit_settings: Optional["NlpVerticalLimitSettings"] = None, - search_space: Optional[List["NlpParameterSubspace"]] = None, - sweep_settings: Optional["NlpSweepSettings"] = None, - validation_data: Optional["MLTableJobInput"] = None, - log_verbosity: Optional[Union[str, "LogVerbosity"]] = None, + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, target_column_name: Optional[str] = None, - primary_metric: Optional[Union[str, "ClassificationPrimaryMetrics"]] = None, - **kwargs - ): - """ + featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.NlpFixedParameters"] = None, + limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + search_space: Optional[List["_models.NlpParameterSubspace"]] = None, + sweep_settings: Optional["_models.NlpSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + primary_metric: Optional[Union[str, "_models.ClassificationPrimaryMetrics"]] = None, + **kwargs: Any + ) -> None: + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. 
+ Also known as label column name in context of classification tasks. + :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :keyword featurization_settings: Featurization inputs needed for AutoML job. :paramtype featurization_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings @@ -30596,42 +30446,58 @@ def __init__( :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings :keyword validation_data: Validation data inputs. :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword primary_metric: Primary metric for Text-Classification task. Possible values include: - "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + :keyword primary_metric: Primary metric for Text-Classification task. Known values are: + "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and "PrecisionScoreWeighted". :paramtype primary_metric: str or ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics """ - super(TextClassification, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, featurization_settings=featurization_settings, fixed_parameters=fixed_parameters, limit_settings=limit_settings, search_space=search_space, sweep_settings=sweep_settings, validation_data=validation_data, **kwargs) + super().__init__( + featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, + limit_settings=limit_settings, + search_space=search_space, + sweep_settings=sweep_settings, + validation_data=validation_data, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type: str = "TextClassification" + self.training_data = training_data + self.primary_metric = primary_metric self.featurization_settings = featurization_settings self.fixed_parameters = fixed_parameters self.limit_settings = limit_settings self.search_space = search_space self.sweep_settings = sweep_settings self.validation_data = validation_data - self.task_type = 'TextClassification' # type: str - self.primary_metric = primary_metric - self.log_verbosity = log_verbosity - self.target_column_name = target_column_name - self.training_data = training_data -class TextClassificationMultilabel(AutoMLVertical, NlpVertical): +class TextClassificationMultilabel(NlpVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes """Text Classification Multilabel task in AutoML NLP vertical. -NLP - Natural Language Processing. + NLP - Natural Language Processing. Variables are only populated by the server, and will be ignored when sending a request. 
All required parameters must be populated in order to send to Azure. + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar featurization_settings: Featurization inputs needed for AutoML job. :vartype featurization_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings @@ -30647,62 +30513,57 @@ class TextClassificationMultilabel(AutoMLVertical, NlpVertical): :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings :ivar validation_data: Validation data inputs. :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar primary_metric: Primary metric for Text-Classification-Multilabel task. Currently only Accuracy is supported as primary metric, hence user need not set it explicitly. - Possible values include: "AUCWeighted", "Accuracy", "NormMacroRecall", - "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", "IOU". + Known values are: "AUCWeighted", "Accuracy", "NormMacroRecall", + "AveragePrecisionScoreWeighted", "PrecisionScoreWeighted", and "IOU". 
:vartype primary_metric: str or ~azure.mgmt.machinelearningservices.models.ClassificationMultilabelPrimaryMetrics """ _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, - 'primary_metric': {'readonly': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, + "primary_metric": {"readonly": True}, } _attribute_map = { - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'NlpFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, - 'search_space': {'key': 'searchSpace', 'type': '[NlpParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'NlpSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } def __init__( self, *, - training_data: "MLTableJobInput", - featurization_settings: Optional["NlpVerticalFeaturizationSettings"] = None, - fixed_parameters: Optional["NlpFixedParameters"] = None, - limit_settings: Optional["NlpVerticalLimitSettings"] = None, - search_space: Optional[List["NlpParameterSubspace"]] = None, - sweep_settings: Optional["NlpSweepSettings"] = None, - validation_data: Optional["MLTableJobInput"] = None, - log_verbosity: Optional[Union[str, "LogVerbosity"]] = None, + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, target_column_name: Optional[str] = None, - **kwargs - ): - """ + featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.NlpFixedParameters"] = None, + limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + search_space: Optional[List["_models.NlpParameterSubspace"]] = None, + sweep_settings: Optional["_models.NlpSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + **kwargs: Any + ) -> None: + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. 
+ :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :keyword featurization_settings: Featurization inputs needed for AutoML job. :paramtype featurization_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings @@ -30718,38 +30579,54 @@ def __init__( :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings :keyword validation_data: Validation data inputs. :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ - super(TextClassificationMultilabel, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, featurization_settings=featurization_settings, fixed_parameters=fixed_parameters, limit_settings=limit_settings, search_space=search_space, sweep_settings=sweep_settings, validation_data=validation_data, **kwargs) + super().__init__( + featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, + limit_settings=limit_settings, + search_space=search_space, + sweep_settings=sweep_settings, + validation_data=validation_data, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type: str = "TextClassificationMultilabel" + self.training_data = training_data + self.primary_metric = None self.featurization_settings = featurization_settings self.fixed_parameters = fixed_parameters self.limit_settings = limit_settings self.search_space = search_space self.sweep_settings = sweep_settings self.validation_data = validation_data - self.task_type = 'TextClassificationMultilabel' # type: str - self.primary_metric = None - self.log_verbosity = log_verbosity - self.target_column_name = target_column_name - self.training_data = training_data -class TextNer(AutoMLVertical, NlpVertical): +class TextNer(NlpVertical, AutoMLVertical): # pylint: disable=too-many-instance-attributes """Text-NER task in AutoML NLP vertical. -NER - Named Entity Recognition. -NLP - Natural Language Processing. + NER - Named Entity Recognition. + NLP - Natural Language Processing. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. + :ivar log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :ivar target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. + :vartype target_column_name: str + :ivar task_type: [Required] Task type for AutoMLJob. Required. 
Known values are: + "Classification", "Regression", "Forecasting", "ImageClassification", + "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", + "TextClassification", "TextClassificationMultilabel", and "TextNER". + :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType + :ivar training_data: [Required] Training data input. Required. + :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar featurization_settings: Featurization inputs needed for AutoML job. :vartype featurization_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings @@ -30765,62 +30642,57 @@ class TextNer(AutoMLVertical, NlpVertical): :vartype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings :ivar validation_data: Validation data inputs. :vartype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :ivar log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :vartype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :ivar target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :vartype target_column_name: str - :ivar task_type: Required. [Required] Task type for AutoMLJob.Constant filled by server. - Possible values include: "Classification", "Regression", "Forecasting", "ImageClassification", - "ImageClassificationMultilabel", "ImageObjectDetection", "ImageInstanceSegmentation", - "TextClassification", "TextClassificationMultilabel", "TextNER". - :vartype task_type: str or ~azure.mgmt.machinelearningservices.models.TaskType - :ivar training_data: Required. [Required] Training data input. - :vartype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :ivar primary_metric: Primary metric for Text-NER task. - Only 'Accuracy' is supported for Text-NER, so user need not set this explicitly. Possible - values include: "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", + Only 'Accuracy' is supported for Text-NER, so user need not set this explicitly. Known values + are: "AUCWeighted", "Accuracy", "NormMacroRecall", "AveragePrecisionScoreWeighted", and "PrecisionScoreWeighted". 
:vartype primary_metric: str or ~azure.mgmt.machinelearningservices.models.ClassificationPrimaryMetrics """ _validation = { - 'task_type': {'required': True}, - 'training_data': {'required': True}, - 'primary_metric': {'readonly': True}, + "task_type": {"required": True}, + "training_data": {"required": True}, + "primary_metric": {"readonly": True}, } _attribute_map = { - 'featurization_settings': {'key': 'featurizationSettings', 'type': 'NlpVerticalFeaturizationSettings'}, - 'fixed_parameters': {'key': 'fixedParameters', 'type': 'NlpFixedParameters'}, - 'limit_settings': {'key': 'limitSettings', 'type': 'NlpVerticalLimitSettings'}, - 'search_space': {'key': 'searchSpace', 'type': '[NlpParameterSubspace]'}, - 'sweep_settings': {'key': 'sweepSettings', 'type': 'NlpSweepSettings'}, - 'validation_data': {'key': 'validationData', 'type': 'MLTableJobInput'}, - 'log_verbosity': {'key': 'logVerbosity', 'type': 'str'}, - 'target_column_name': {'key': 'targetColumnName', 'type': 'str'}, - 'task_type': {'key': 'taskType', 'type': 'str'}, - 'training_data': {'key': 'trainingData', 'type': 'MLTableJobInput'}, - 'primary_metric': {'key': 'primaryMetric', 'type': 'str'}, + "log_verbosity": {"key": "logVerbosity", "type": "str"}, + "target_column_name": {"key": "targetColumnName", "type": "str"}, + "task_type": {"key": "taskType", "type": "str"}, + "training_data": {"key": "trainingData", "type": "MLTableJobInput"}, + "featurization_settings": {"key": "featurizationSettings", "type": "NlpVerticalFeaturizationSettings"}, + "fixed_parameters": {"key": "fixedParameters", "type": "NlpFixedParameters"}, + "limit_settings": {"key": "limitSettings", "type": "NlpVerticalLimitSettings"}, + "search_space": {"key": "searchSpace", "type": "[NlpParameterSubspace]"}, + "sweep_settings": {"key": "sweepSettings", "type": "NlpSweepSettings"}, + "validation_data": {"key": "validationData", "type": "MLTableJobInput"}, + "primary_metric": {"key": "primaryMetric", "type": "str"}, } def __init__( self, *, - training_data: "MLTableJobInput", - featurization_settings: Optional["NlpVerticalFeaturizationSettings"] = None, - fixed_parameters: Optional["NlpFixedParameters"] = None, - limit_settings: Optional["NlpVerticalLimitSettings"] = None, - search_space: Optional[List["NlpParameterSubspace"]] = None, - sweep_settings: Optional["NlpSweepSettings"] = None, - validation_data: Optional["MLTableJobInput"] = None, - log_verbosity: Optional[Union[str, "LogVerbosity"]] = None, + training_data: "_models.MLTableJobInput", + log_verbosity: Optional[Union[str, "_models.LogVerbosity"]] = None, target_column_name: Optional[str] = None, - **kwargs - ): - """ + featurization_settings: Optional["_models.NlpVerticalFeaturizationSettings"] = None, + fixed_parameters: Optional["_models.NlpFixedParameters"] = None, + limit_settings: Optional["_models.NlpVerticalLimitSettings"] = None, + search_space: Optional[List["_models.NlpParameterSubspace"]] = None, + sweep_settings: Optional["_models.NlpSweepSettings"] = None, + validation_data: Optional["_models.MLTableJobInput"] = None, + **kwargs: Any + ) -> None: + """ + :keyword log_verbosity: Log verbosity for the job. Known values are: "NotSet", "Debug", "Info", + "Warning", "Error", and "Critical". + :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity + :keyword target_column_name: Target column name: This is prediction values column. + Also known as label column name in context of classification tasks. 
+ :paramtype target_column_name: str + :keyword training_data: [Required] Training data input. Required. + :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput :keyword featurization_settings: Featurization inputs needed for AutoML job. :paramtype featurization_settings: ~azure.mgmt.machinelearningservices.models.NlpVerticalFeaturizationSettings @@ -30836,30 +30708,33 @@ def __init__( :paramtype sweep_settings: ~azure.mgmt.machinelearningservices.models.NlpSweepSettings :keyword validation_data: Validation data inputs. :paramtype validation_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput - :keyword log_verbosity: Log verbosity for the job. Possible values include: "NotSet", "Debug", - "Info", "Warning", "Error", "Critical". - :paramtype log_verbosity: str or ~azure.mgmt.machinelearningservices.models.LogVerbosity - :keyword target_column_name: Target column name: This is prediction values column. - Also known as label column name in context of classification tasks. - :paramtype target_column_name: str - :keyword training_data: Required. [Required] Training data input. - :paramtype training_data: ~azure.mgmt.machinelearningservices.models.MLTableJobInput """ - super(TextNer, self).__init__(log_verbosity=log_verbosity, target_column_name=target_column_name, training_data=training_data, featurization_settings=featurization_settings, fixed_parameters=fixed_parameters, limit_settings=limit_settings, search_space=search_space, sweep_settings=sweep_settings, validation_data=validation_data, **kwargs) + super().__init__( + featurization_settings=featurization_settings, + fixed_parameters=fixed_parameters, + limit_settings=limit_settings, + search_space=search_space, + sweep_settings=sweep_settings, + validation_data=validation_data, + log_verbosity=log_verbosity, + target_column_name=target_column_name, + training_data=training_data, + **kwargs + ) + self.log_verbosity = log_verbosity + self.target_column_name = target_column_name + self.task_type: str = "TextNER" + self.training_data = training_data + self.primary_metric = None self.featurization_settings = featurization_settings self.fixed_parameters = fixed_parameters self.limit_settings = limit_settings self.search_space = search_space self.sweep_settings = sweep_settings self.validation_data = validation_data - self.task_type = 'TextNER' # type: str - self.primary_metric = None - self.log_verbosity = log_verbosity - self.target_column_name = target_column_name - self.training_data = training_data -class TmpfsOptions(msrest.serialization.Model): +class TmpfsOptions(_serialization.Model): """TmpfsOptions. :ivar size: Mention the Tmpfs size. @@ -30867,20 +30742,15 @@ class TmpfsOptions(msrest.serialization.Model): """ _attribute_map = { - 'size': {'key': 'size', 'type': 'int'}, + "size": {"key": "size", "type": "int"}, } - def __init__( - self, - *, - size: Optional[int] = None, - **kwargs - ): + def __init__(self, *, size: Optional[int] = None, **kwargs: Any) -> None: """ :keyword size: Mention the Tmpfs size. :paramtype size: int """ - super(TmpfsOptions, self).__init__(**kwargs) + super().__init__(**kwargs) self.size = size @@ -30889,9 +30759,9 @@ class TopNFeaturesByAttribution(MonitoringFeatureFilterBase): All required parameters must be populated in order to send to Azure. - :ivar filter_type: Required. [Required] Specifies the feature filter to leverage when selecting - features to calculate metrics over.Constant filled by server. 
Possible values include: - "AllFeatures", "TopNByAttribution", "FeatureSubset". + :ivar filter_type: [Required] Specifies the feature filter to leverage when selecting features + to calculate metrics over. Required. Known values are: "AllFeatures", "TopNByAttribution", and + "FeatureSubset". :vartype filter_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringFeatureFilterType :ivar top: The number of top features to include. @@ -30899,26 +30769,21 @@ class TopNFeaturesByAttribution(MonitoringFeatureFilterBase): """ _validation = { - 'filter_type': {'required': True}, + "filter_type": {"required": True}, } _attribute_map = { - 'filter_type': {'key': 'filterType', 'type': 'str'}, - 'top': {'key': 'top', 'type': 'int'}, + "filter_type": {"key": "filterType", "type": "str"}, + "top": {"key": "top", "type": "int"}, } - def __init__( - self, - *, - top: Optional[int] = 10, - **kwargs - ): + def __init__(self, *, top: int = 10, **kwargs: Any) -> None: """ :keyword top: The number of top features to include. :paramtype top: int """ - super(TopNFeaturesByAttribution, self).__init__(**kwargs) - self.filter_type = 'TopNByAttribution' # type: str + super().__init__(**kwargs) + self.filter_type: str = "TopNByAttribution" self.top = top @@ -30931,98 +30796,99 @@ class TrailingInputData(MonitoringInputDataBase): :vartype columns: dict[str, str] :ivar data_context: The context metadata of the data source. :vartype data_context: str - :ivar input_data_type: Required. [Required] Specifies the type of signal to monitor.Constant - filled by server. Possible values include: "Static", "Trailing", "Fixed". + :ivar input_data_type: [Required] Specifies the type of signal to monitor. Required. Known + values are: "Static", "Trailing", and "Fixed". :vartype input_data_type: str or ~azure.mgmt.machinelearningservices.models.MonitoringInputDataType - :ivar job_input_type: Required. [Required] Specifies the type of job. Possible values include: - "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", "triton_model". + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :ivar uri: Required. [Required] Input Asset URI. + :ivar uri: [Required] Input Asset URI. Required. :vartype uri: str :ivar preprocessing_component_id: The ARM resource ID of the component resource used to preprocess the data. :vartype preprocessing_component_id: str - :ivar window_offset: Required. [Required] The time offset between the end of the data window - and the monitor's current run time. + :ivar window_offset: [Required] The time offset between the end of the data window and the + monitor's current run time. Required. :vartype window_offset: ~datetime.timedelta - :ivar window_size: Required. [Required] The size of the trailing data window. + :ivar window_size: [Required] The size of the trailing data window. Required. 
:vartype window_size: ~datetime.timedelta """ _validation = { - 'input_data_type': {'required': True}, - 'job_input_type': {'required': True}, - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'window_offset': {'required': True}, - 'window_size': {'required': True}, + "input_data_type": {"required": True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "window_offset": {"required": True}, + "window_size": {"required": True}, } _attribute_map = { - 'columns': {'key': 'columns', 'type': '{str}'}, - 'data_context': {'key': 'dataContext', 'type': 'str'}, - 'input_data_type': {'key': 'inputDataType', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'preprocessing_component_id': {'key': 'preprocessingComponentId', 'type': 'str'}, - 'window_offset': {'key': 'windowOffset', 'type': 'duration'}, - 'window_size': {'key': 'windowSize', 'type': 'duration'}, + "columns": {"key": "columns", "type": "{str}"}, + "data_context": {"key": "dataContext", "type": "str"}, + "input_data_type": {"key": "inputDataType", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, + "preprocessing_component_id": {"key": "preprocessingComponentId", "type": "str"}, + "window_offset": {"key": "windowOffset", "type": "duration"}, + "window_size": {"key": "windowSize", "type": "duration"}, } def __init__( self, *, - job_input_type: Union[str, "JobInputType"], + job_input_type: Union[str, "_models.JobInputType"], uri: str, window_offset: datetime.timedelta, window_size: datetime.timedelta, columns: Optional[Dict[str, str]] = None, data_context: Optional[str] = None, preprocessing_component_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword columns: Mapping of column names to special uses. :paramtype columns: dict[str, str] :keyword data_context: The context metadata of the data source. :paramtype data_context: str - :keyword job_input_type: Required. [Required] Specifies the type of job. Possible values - include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", + :keyword job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". :paramtype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType - :keyword uri: Required. [Required] Input Asset URI. + :keyword uri: [Required] Input Asset URI. Required. :paramtype uri: str :keyword preprocessing_component_id: The ARM resource ID of the component resource used to preprocess the data. :paramtype preprocessing_component_id: str - :keyword window_offset: Required. [Required] The time offset between the end of the data window - and the monitor's current run time. + :keyword window_offset: [Required] The time offset between the end of the data window and the + monitor's current run time. Required. :paramtype window_offset: ~datetime.timedelta - :keyword window_size: Required. [Required] The size of the trailing data window. + :keyword window_size: [Required] The size of the trailing data window. Required. 
:paramtype window_size: ~datetime.timedelta """ - super(TrailingInputData, self).__init__(columns=columns, data_context=data_context, job_input_type=job_input_type, uri=uri, **kwargs) - self.input_data_type = 'Trailing' # type: str + super().__init__(columns=columns, data_context=data_context, job_input_type=job_input_type, uri=uri, **kwargs) + self.input_data_type: str = "Trailing" self.preprocessing_component_id = preprocessing_component_id self.window_offset = window_offset self.window_size = window_size -class TrialComponent(msrest.serialization.Model): +class TrialComponent(_serialization.Model): """Trial component definition. All required parameters must be populated in order to send to Azure. :ivar code_id: ARM resource ID of the code asset. :vartype code_id: str - :ivar command: Required. [Required] The command to execute on startup of the job. eg. "python - train.py". + :ivar command: [Required] The command to execute on startup of the job. eg. "python train.py". + Required. :vartype command: str :ivar distribution: Distribution configuration of the job. If set, this should be one of Mpi, Tensorflow, PyTorch, or null. :vartype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration - :ivar environment_id: Required. [Required] The ARM resource ID of the Environment specification - for the job. + :ivar environment_id: [Required] The ARM resource ID of the Environment specification for the + job. Required. :vartype environment_id: str :ivar environment_variables: Environment variables included in the job. :vartype environment_variables: dict[str, str] @@ -31031,17 +30897,17 @@ class TrialComponent(msrest.serialization.Model): """ _validation = { - 'command': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'environment_id': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "command": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, + "environment_id": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'code_id': {'key': 'codeId', 'type': 'str'}, - 'command': {'key': 'command', 'type': 'str'}, - 'distribution': {'key': 'distribution', 'type': 'DistributionConfiguration'}, - 'environment_id': {'key': 'environmentId', 'type': 'str'}, - 'environment_variables': {'key': 'environmentVariables', 'type': '{str}'}, - 'resources': {'key': 'resources', 'type': 'JobResourceConfiguration'}, + "code_id": {"key": "codeId", "type": "str"}, + "command": {"key": "command", "type": "str"}, + "distribution": {"key": "distribution", "type": "DistributionConfiguration"}, + "environment_id": {"key": "environmentId", "type": "str"}, + "environment_variables": {"key": "environmentVariables", "type": "{str}"}, + "resources": {"key": "resources", "type": "JobResourceConfiguration"}, } def __init__( @@ -31050,29 +30916,29 @@ def __init__( command: str, environment_id: str, code_id: Optional[str] = None, - distribution: Optional["DistributionConfiguration"] = None, + distribution: Optional["_models.DistributionConfiguration"] = None, environment_variables: Optional[Dict[str, str]] = None, - resources: Optional["JobResourceConfiguration"] = None, - **kwargs - ): + resources: Optional["_models.JobResourceConfiguration"] = None, + **kwargs: Any + ) -> None: """ :keyword code_id: ARM resource ID of the code asset. :paramtype code_id: str - :keyword command: Required. [Required] The command to execute on startup of the job. eg. - "python train.py". 
+ :keyword command: [Required] The command to execute on startup of the job. eg. "python + train.py". Required. :paramtype command: str :keyword distribution: Distribution configuration of the job. If set, this should be one of Mpi, Tensorflow, PyTorch, or null. :paramtype distribution: ~azure.mgmt.machinelearningservices.models.DistributionConfiguration - :keyword environment_id: Required. [Required] The ARM resource ID of the Environment - specification for the job. + :keyword environment_id: [Required] The ARM resource ID of the Environment specification for + the job. Required. :paramtype environment_id: str :keyword environment_variables: Environment variables included in the job. :paramtype environment_variables: dict[str, str] :keyword resources: Compute Resource configuration for the job. :paramtype resources: ~azure.mgmt.machinelearningservices.models.JobResourceConfiguration """ - super(TrialComponent, self).__init__(**kwargs) + super().__init__(**kwargs) self.code_id = code_id self.command = command self.distribution = distribution @@ -31086,8 +30952,8 @@ class TritonInferencingServer(InferencingServer): All required parameters must be populated in order to send to Azure. - :ivar server_type: Required. [Required] Inferencing server type for various targets.Constant - filled by server. Possible values include: "AzureMLOnline", "AzureMLBatch", "Triton", "Custom". + :ivar server_type: [Required] Inferencing server type for various targets. Required. Known + values are: "AzureMLOnline", "AzureMLBatch", "Triton", and "Custom". :vartype server_type: str or ~azure.mgmt.machinelearningservices.models.InferencingServerType :ivar inference_configuration: Inference configuration for Triton. :vartype inference_configuration: @@ -31095,160 +30961,165 @@ class TritonInferencingServer(InferencingServer): """ _validation = { - 'server_type': {'required': True}, + "server_type": {"required": True}, } _attribute_map = { - 'server_type': {'key': 'serverType', 'type': 'str'}, - 'inference_configuration': {'key': 'inferenceConfiguration', 'type': 'OnlineInferenceConfiguration'}, + "server_type": {"key": "serverType", "type": "str"}, + "inference_configuration": {"key": "inferenceConfiguration", "type": "OnlineInferenceConfiguration"}, } def __init__( - self, - *, - inference_configuration: Optional["OnlineInferenceConfiguration"] = None, - **kwargs - ): + self, *, inference_configuration: Optional["_models.OnlineInferenceConfiguration"] = None, **kwargs: Any + ) -> None: """ :keyword inference_configuration: Inference configuration for Triton. :paramtype inference_configuration: ~azure.mgmt.machinelearningservices.models.OnlineInferenceConfiguration """ - super(TritonInferencingServer, self).__init__(**kwargs) - self.server_type = 'Triton' # type: str + super().__init__(**kwargs) + self.server_type: str = "Triton" self.inference_configuration = inference_configuration -class TritonModelJobInput(JobInput, AssetJobInput): +class TritonModelJobInput(AssetJobInput, JobInput): """TritonModelJobInput. All required parameters must be populated in order to send to Azure. - :ivar mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str :ivar description: Description for the input. :vartype description: str - :ivar job_input_type: Required. 
[Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. + :vartype uri: str """ _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, uri: str, - mode: Optional[Union[str, "InputDeliveryMode"]] = None, description: Optional[str] = None, - **kwargs - ): + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + **kwargs: Any + ) -> None: """ - :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str :keyword description: Description for the input. :paramtype description: str + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str """ - super(TritonModelJobInput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_input_type: str = "triton_model" self.mode = mode self.uri = uri - self.job_input_type = 'triton_model' # type: str - self.description = description -class TritonModelJobOutput(JobOutput, AssetJobOutput): +class TritonModelJobOutput(AssetJobOutput, JobOutput): """TritonModelJobOutput. All required parameters must be populated in order to send to Azure. + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType :ivar asset_name: Output Asset Name. :vartype asset_name: str :ivar asset_version: Output Asset Version. :vartype asset_version: str :ivar auto_delete_setting: Auto delete setting of output data asset. 
:vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and "Direct". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :ivar uri: Output Asset URI. :vartype uri: str - :ivar description: Description for the output. - :vartype description: str - :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by - server. Possible values include: "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType """ _validation = { - 'job_output_type': {'required': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, + description: Optional[str] = None, asset_name: Optional[str] = None, asset_version: Optional[str] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - mode: Optional[Union[str, "OutputDeliveryMode"]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, uri: Optional[str] = None, - description: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ + :keyword description: Description for the output. + :paramtype description: str :keyword asset_name: Output Asset Name. :paramtype asset_name: str :keyword asset_version: Output Asset Version. :paramtype asset_version: str :keyword auto_delete_setting: Auto delete setting of output data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and "Direct". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :keyword uri: Output Asset URI. :paramtype uri: str - :keyword description: Description for the output. 
- :paramtype description: str """ - super(TritonModelJobOutput, self).__init__(description=description, asset_name=asset_name, asset_version=asset_version, auto_delete_setting=auto_delete_setting, mode=mode, uri=uri, **kwargs) + super().__init__( + asset_name=asset_name, + asset_version=asset_version, + auto_delete_setting=auto_delete_setting, + mode=mode, + uri=uri, + description=description, + **kwargs + ) + self.description = description + self.job_output_type: str = "triton_model" self.asset_name = asset_name self.asset_version = asset_version self.auto_delete_setting = auto_delete_setting self.mode = mode self.uri = uri - self.job_output_type = 'triton_model' # type: str - self.description = description class TruncationSelectionPolicy(EarlyTerminationPolicy): - """Defines an early termination policy that cancels a given percentage of runs at each evaluation interval. + """Defines an early termination policy that cancels a given percentage of runs at each evaluation + interval. All required parameters must be populated in order to send to Azure. @@ -31256,8 +31127,8 @@ class TruncationSelectionPolicy(EarlyTerminationPolicy): :vartype delay_evaluation: int :ivar evaluation_interval: Interval (number of runs) between policy evaluations. :vartype evaluation_interval: int - :ivar policy_type: Required. [Required] Name of policy configuration.Constant filled by server. - Possible values include: "Bandit", "MedianStopping", "TruncationSelection". + :ivar policy_type: [Required] Name of policy configuration. Required. Known values are: + "Bandit", "MedianStopping", and "TruncationSelection". :vartype policy_type: str or ~azure.mgmt.machinelearningservices.models.EarlyTerminationPolicyType :ivar truncation_percentage: The percentage of runs to cancel at each evaluation interval. @@ -31265,24 +31136,19 @@ class TruncationSelectionPolicy(EarlyTerminationPolicy): """ _validation = { - 'policy_type': {'required': True}, + "policy_type": {"required": True}, } _attribute_map = { - 'delay_evaluation': {'key': 'delayEvaluation', 'type': 'int'}, - 'evaluation_interval': {'key': 'evaluationInterval', 'type': 'int'}, - 'policy_type': {'key': 'policyType', 'type': 'str'}, - 'truncation_percentage': {'key': 'truncationPercentage', 'type': 'int'}, + "delay_evaluation": {"key": "delayEvaluation", "type": "int"}, + "evaluation_interval": {"key": "evaluationInterval", "type": "int"}, + "policy_type": {"key": "policyType", "type": "str"}, + "truncation_percentage": {"key": "truncationPercentage", "type": "int"}, } def __init__( - self, - *, - delay_evaluation: Optional[int] = 0, - evaluation_interval: Optional[int] = 0, - truncation_percentage: Optional[int] = 0, - **kwargs - ): + self, *, delay_evaluation: int = 0, evaluation_interval: int = 0, truncation_percentage: int = 0, **kwargs: Any + ) -> None: """ :keyword delay_evaluation: Number of intervals by which to delay the first evaluation. :paramtype delay_evaluation: int @@ -31291,12 +31157,12 @@ def __init__( :keyword truncation_percentage: The percentage of runs to cancel at each evaluation interval. 
:paramtype truncation_percentage: int """ - super(TruncationSelectionPolicy, self).__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) - self.policy_type = 'TruncationSelection' # type: str + super().__init__(delay_evaluation=delay_evaluation, evaluation_interval=evaluation_interval, **kwargs) + self.policy_type: str = "TruncationSelection" self.truncation_percentage = truncation_percentage -class UpdateWorkspaceQuotas(msrest.serialization.Model): +class UpdateWorkspaceQuotas(_serialization.Model): """The properties for update Quota response. Variables are only populated by the server, and will be ignored when sending a request. @@ -31306,47 +31172,41 @@ class UpdateWorkspaceQuotas(msrest.serialization.Model): :ivar type: Specifies the resource type. :vartype type: str :ivar limit: The maximum permitted quota of the resource. - :vartype limit: long - :ivar unit: An enum describing the unit of quota measurement. Possible values include: "Count". + :vartype limit: int + :ivar unit: An enum describing the unit of quota measurement. "Count" :vartype unit: str or ~azure.mgmt.machinelearningservices.models.QuotaUnit - :ivar status: Status of update workspace quota. Possible values include: "Undefined", - "Success", "Failure", "InvalidQuotaBelowClusterMinimum", - "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku", - "OperationNotEnabledForRegion". + :ivar status: Status of update workspace quota. Known values are: "Undefined", "Success", + "Failure", "InvalidQuotaBelowClusterMinimum", "InvalidQuotaExceedsSubscriptionLimit", + "InvalidVMFamilyName", "OperationNotSupportedForSku", and "OperationNotEnabledForRegion". :vartype status: str or ~azure.mgmt.machinelearningservices.models.Status """ _validation = { - 'id': {'readonly': True}, - 'type': {'readonly': True}, - 'unit': {'readonly': True}, + "id": {"readonly": True}, + "type": {"readonly": True}, + "unit": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'status': {'key': 'status', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "limit": {"key": "limit", "type": "int"}, + "unit": {"key": "unit", "type": "str"}, + "status": {"key": "status", "type": "str"}, } def __init__( - self, - *, - limit: Optional[int] = None, - status: Optional[Union[str, "Status"]] = None, - **kwargs - ): + self, *, limit: Optional[int] = None, status: Optional[Union[str, "_models.Status"]] = None, **kwargs: Any + ) -> None: """ :keyword limit: The maximum permitted quota of the resource. - :paramtype limit: long - :keyword status: Status of update workspace quota. Possible values include: "Undefined", - "Success", "Failure", "InvalidQuotaBelowClusterMinimum", - "InvalidQuotaExceedsSubscriptionLimit", "InvalidVMFamilyName", "OperationNotSupportedForSku", - "OperationNotEnabledForRegion". + :paramtype limit: int + :keyword status: Status of update workspace quota. Known values are: "Undefined", "Success", + "Failure", "InvalidQuotaBelowClusterMinimum", "InvalidQuotaExceedsSubscriptionLimit", + "InvalidVMFamilyName", "OperationNotSupportedForSku", and "OperationNotEnabledForRegion". 
:paramtype status: str or ~azure.mgmt.machinelearningservices.models.Status """ - super(UpdateWorkspaceQuotas, self).__init__(**kwargs) + super().__init__(**kwargs) self.id = None self.type = None self.limit = limit @@ -31354,7 +31214,7 @@ def __init__( self.status = status -class UpdateWorkspaceQuotasResult(msrest.serialization.Model): +class UpdateWorkspaceQuotasResult(_serialization.Model): """The result of update workspace quota. Variables are only populated by the server, and will be ignored when sending a request. @@ -31367,22 +31227,18 @@ class UpdateWorkspaceQuotasResult(msrest.serialization.Model): """ _validation = { - 'value': {'readonly': True}, - 'next_link': {'readonly': True}, + "value": {"readonly": True}, + "next_link": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': '[UpdateWorkspaceQuotas]'}, - 'next_link': {'key': 'nextLink', 'type': 'str'}, + "value": {"key": "value", "type": "[UpdateWorkspaceQuotas]"}, + "next_link": {"key": "nextLink", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(UpdateWorkspaceQuotasResult, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.value = None self.next_link = None @@ -31396,7 +31252,7 @@ class UriFileDataVersion(DataVersionBaseProperties): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -31406,11 +31262,11 @@ class UriFileDataVersion(DataVersionBaseProperties): :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is provided it will be used to populate IsArchived. :vartype is_archived: bool - :ivar data_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "uri_file", "uri_folder", "mltable". + :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", + "uri_folder", and "mltable". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType - :ivar data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. + :ivar data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. :vartype data_uri: str :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual Property. 
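# --- Reviewer sketch (illustrative only; not introduced by this patch) -------
# The regenerated constructors above remain keyword-only, so existing call
# sites are unaffected by the reordered parameters. A minimal sketch of
# building a few of the models changed above (TrailingInputData,
# TrialComponent, TruncationSelectionPolicy), assuming they are importable
# from the v2023_08_01_preview REST client's models package (import path
# assumed) and using placeholder IDs/URIs:
import datetime

from azure.ai.ml._restclient.v2023_08_01_preview import models  # assumed import path

trailing = models.TrailingInputData(
    job_input_type="mltable",                        # one of the known JobInputType values
    uri="example_monitor_data",                      # placeholder asset URI, required
    window_offset=datetime.timedelta(days=1),        # required; serialized as a duration
    window_size=datetime.timedelta(days=7),          # required trailing window size
)
trial = models.TrialComponent(
    command="python train.py",                       # [Required] startup command
    environment_id="<environment ARM resource ID>",  # placeholder, required
)
policy = models.TruncationSelectionPolicy(
    evaluation_interval=1,
    delay_evaluation=5,
    truncation_percentage=20,
)
# -----------------------------------------------------------------------------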
@@ -31420,21 +31276,21 @@ class UriFileDataVersion(DataVersionBaseProperties): """ _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "data_type": {"required": True}, + "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, } def __init__( @@ -31444,19 +31300,19 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - is_anonymous: Optional[bool] = False, - is_archived: Optional[bool] = False, - intellectual_property: Optional["IntellectualProperty"] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + intellectual_property: Optional["_models.IntellectualProperty"] = None, stage: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -31466,8 +31322,8 @@ def __init__( :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is provided it will be used to populate IsArchived. :paramtype is_archived: bool - :keyword data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. + :keyword data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. :paramtype data_uri: str :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual Property. @@ -31476,136 +31332,154 @@ def __init__( :keyword stage: Stage in the data lifecycle assigned to this data asset. 
:paramtype stage: str """ - super(UriFileDataVersion, self).__init__(description=description, properties=properties, tags=tags, auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, data_uri=data_uri, intellectual_property=intellectual_property, stage=stage, **kwargs) - self.data_type = 'uri_file' # type: str - - -class UriFileJobInput(JobInput, AssetJobInput): + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + data_uri=data_uri, + intellectual_property=intellectual_property, + stage=stage, + **kwargs + ) + self.data_type: str = "uri_file" + + +class UriFileJobInput(AssetJobInput, JobInput): """UriFileJobInput. All required parameters must be populated in order to send to Azure. - :ivar mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str :ivar description: Description for the input. :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. + :vartype uri: str """ _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, uri: str, - mode: Optional[Union[str, "InputDeliveryMode"]] = None, description: Optional[str] = None, - **kwargs - ): + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + **kwargs: Any + ) -> None: """ - :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str :keyword description: Description for the input. :paramtype description: str + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". 
+ :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str """ - super(UriFileJobInput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_input_type: str = "uri_file" self.mode = mode self.uri = uri - self.job_input_type = 'uri_file' # type: str - self.description = description -class UriFileJobOutput(JobOutput, AssetJobOutput): +class UriFileJobOutput(AssetJobOutput, JobOutput): """UriFileJobOutput. All required parameters must be populated in order to send to Azure. + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType :ivar asset_name: Output Asset Name. :vartype asset_name: str :ivar asset_version: Output Asset Version. :vartype asset_version: str :ivar auto_delete_setting: Auto delete setting of output data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and "Direct". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :ivar uri: Output Asset URI. :vartype uri: str - :ivar description: Description for the output. - :vartype description: str - :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by - server. Possible values include: "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType """ _validation = { - 'job_output_type': {'required': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, + description: Optional[str] = None, asset_name: Optional[str] = None, asset_version: Optional[str] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - mode: Optional[Union[str, "OutputDeliveryMode"]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, uri: Optional[str] = None, - description: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ + :keyword description: Description for the output. 
+ :paramtype description: str :keyword asset_name: Output Asset Name. :paramtype asset_name: str :keyword asset_version: Output Asset Version. :paramtype asset_version: str :keyword auto_delete_setting: Auto delete setting of output data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and "Direct". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :keyword uri: Output Asset URI. :paramtype uri: str - :keyword description: Description for the output. - :paramtype description: str """ - super(UriFileJobOutput, self).__init__(description=description, asset_name=asset_name, asset_version=asset_version, auto_delete_setting=auto_delete_setting, mode=mode, uri=uri, **kwargs) + super().__init__( + asset_name=asset_name, + asset_version=asset_version, + auto_delete_setting=auto_delete_setting, + mode=mode, + uri=uri, + description=description, + **kwargs + ) + self.description = description + self.job_output_type: str = "uri_file" self.asset_name = asset_name self.asset_version = asset_version self.auto_delete_setting = auto_delete_setting self.mode = mode self.uri = uri - self.job_output_type = 'uri_file' # type: str - self.description = description class UriFolderDataVersion(DataVersionBaseProperties): @@ -31617,7 +31491,7 @@ class UriFolderDataVersion(DataVersionBaseProperties): :vartype description: str :ivar properties: The asset property dictionary. :vartype properties: dict[str, str] - :ivar tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :ivar tags: Tag dictionary. Tags can be added, removed, and updated. :vartype tags: dict[str, str] :ivar auto_delete_setting: Specifies the lifecycle setting of managed data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -31627,11 +31501,11 @@ class UriFolderDataVersion(DataVersionBaseProperties): :ivar is_archived: Is the asset archived? For types where Stage is defined, when Stage is provided it will be used to populate IsArchived. :vartype is_archived: bool - :ivar data_type: Required. [Required] Specifies the type of data.Constant filled by server. - Possible values include: "uri_file", "uri_folder", "mltable". + :ivar data_type: [Required] Specifies the type of data. Required. Known values are: "uri_file", + "uri_folder", and "mltable". :vartype data_type: str or ~azure.mgmt.machinelearningservices.models.DataType - :ivar data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. + :ivar data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. :vartype data_uri: str :ivar intellectual_property: Intellectual Property details. Used if data is an Intellectual Property. 
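# --- Reviewer sketch (illustrative only; not introduced by this patch) -------
# The Uri*JobInput/Uri*JobOutput models stay keyword-only after regeneration,
# so the swapped base-class order (AssetJobInput/JobInput) and reordered
# parameters above do not change how the objects are built. Import path
# assumed as in the earlier sketch; the URI and description are placeholders.
from azure.ai.ml._restclient.v2023_08_01_preview import models  # assumed import path

job_input = models.UriFileJobInput(
    uri="example_training_data_csv",   # placeholder asset URI, required
    mode="ReadOnlyMount",              # one of the known InputDeliveryMode values
)
job_output = models.UriFileJobOutput(
    mode="ReadWriteMount",             # one of the known OutputDeliveryMode values
    description="scored results",
)
# The discriminators are filled in by the constructors, matching the
# assignments shown above:
assert job_input.job_input_type == "uri_file"
assert job_output.job_output_type == "uri_file"
# -----------------------------------------------------------------------------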
@@ -31641,21 +31515,21 @@ class UriFolderDataVersion(DataVersionBaseProperties): """ _validation = { - 'data_type': {'required': True}, - 'data_uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, + "data_type": {"required": True}, + "data_uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'description': {'key': 'description', 'type': 'str'}, - 'properties': {'key': 'properties', 'type': '{str}'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'}, - 'is_archived': {'key': 'isArchived', 'type': 'bool'}, - 'data_type': {'key': 'dataType', 'type': 'str'}, - 'data_uri': {'key': 'dataUri', 'type': 'str'}, - 'intellectual_property': {'key': 'intellectualProperty', 'type': 'IntellectualProperty'}, - 'stage': {'key': 'stage', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "properties": {"key": "properties", "type": "{str}"}, + "tags": {"key": "tags", "type": "{str}"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "is_anonymous": {"key": "isAnonymous", "type": "bool"}, + "is_archived": {"key": "isArchived", "type": "bool"}, + "data_type": {"key": "dataType", "type": "str"}, + "data_uri": {"key": "dataUri", "type": "str"}, + "intellectual_property": {"key": "intellectualProperty", "type": "IntellectualProperty"}, + "stage": {"key": "stage", "type": "str"}, } def __init__( @@ -31665,19 +31539,19 @@ def __init__( description: Optional[str] = None, properties: Optional[Dict[str, str]] = None, tags: Optional[Dict[str, str]] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - is_anonymous: Optional[bool] = False, - is_archived: Optional[bool] = False, - intellectual_property: Optional["IntellectualProperty"] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + is_anonymous: bool = False, + is_archived: bool = False, + intellectual_property: Optional["_models.IntellectualProperty"] = None, stage: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword description: The asset description text. :paramtype description: str :keyword properties: The asset property dictionary. :paramtype properties: dict[str, str] - :keyword tags: A set of tags. Tag dictionary. Tags can be added, removed, and updated. + :keyword tags: Tag dictionary. Tags can be added, removed, and updated. :paramtype tags: dict[str, str] :keyword auto_delete_setting: Specifies the lifecycle setting of managed data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting @@ -31687,8 +31561,8 @@ def __init__( :keyword is_archived: Is the asset archived? For types where Stage is defined, when Stage is provided it will be used to populate IsArchived. :paramtype is_archived: bool - :keyword data_uri: Required. [Required] Uri of the data. Example: - https://go.microsoft.com/fwlink/?linkid=2202330. + :keyword data_uri: [Required] Uri of the data. Example: + https://go.microsoft.com/fwlink/?linkid=2202330. Required. :paramtype data_uri: str :keyword intellectual_property: Intellectual Property details. Used if data is an Intellectual Property. @@ -31697,139 +31571,157 @@ def __init__( :keyword stage: Stage in the data lifecycle assigned to this data asset. 
:paramtype stage: str """ - super(UriFolderDataVersion, self).__init__(description=description, properties=properties, tags=tags, auto_delete_setting=auto_delete_setting, is_anonymous=is_anonymous, is_archived=is_archived, data_uri=data_uri, intellectual_property=intellectual_property, stage=stage, **kwargs) - self.data_type = 'uri_folder' # type: str - - -class UriFolderJobInput(JobInput, AssetJobInput): + super().__init__( + description=description, + properties=properties, + tags=tags, + auto_delete_setting=auto_delete_setting, + is_anonymous=is_anonymous, + is_archived=is_archived, + data_uri=data_uri, + intellectual_property=intellectual_property, + stage=stage, + **kwargs + ) + self.data_type: str = "uri_folder" + + +class UriFolderJobInput(AssetJobInput, JobInput): """UriFolderJobInput. All required parameters must be populated in order to send to Azure. - :ivar mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :ivar uri: Required. [Required] Input Asset URI. - :vartype uri: str :ivar description: Description for the input. :vartype description: str - :ivar job_input_type: Required. [Required] Specifies the type of job.Constant filled by server. - Possible values include: "literal", "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". + :ivar job_input_type: [Required] Specifies the type of job. Required. Known values are: + "literal", "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and + "triton_model". :vartype job_input_type: str or ~azure.mgmt.machinelearningservices.models.JobInputType + :ivar mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". + :vartype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :ivar uri: [Required] Input Asset URI. Required. + :vartype uri: str """ _validation = { - 'uri': {'required': True, 'min_length': 1, 'pattern': r'[a-zA-Z0-9_]'}, - 'job_input_type': {'required': True}, + "job_input_type": {"required": True}, + "uri": {"required": True, "min_length": 1, "pattern": r"[a-zA-Z0-9_]"}, } _attribute_map = { - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_input_type': {'key': 'jobInputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_input_type": {"key": "jobInputType", "type": "str"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, uri: str, - mode: Optional[Union[str, "InputDeliveryMode"]] = None, description: Optional[str] = None, - **kwargs - ): + mode: Optional[Union[str, "_models.InputDeliveryMode"]] = None, + **kwargs: Any + ) -> None: """ - :keyword mode: Input Asset Delivery Mode. Possible values include: "ReadOnlyMount", - "ReadWriteMount", "Download", "Direct", "EvalMount", "EvalDownload". - :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode - :keyword uri: Required. [Required] Input Asset URI. - :paramtype uri: str :keyword description: Description for the input. :paramtype description: str + :keyword mode: Input Asset Delivery Mode. Known values are: "ReadOnlyMount", "ReadWriteMount", + "Download", "Direct", "EvalMount", and "EvalDownload". 
+ :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.InputDeliveryMode + :keyword uri: [Required] Input Asset URI. Required. + :paramtype uri: str """ - super(UriFolderJobInput, self).__init__(description=description, mode=mode, uri=uri, **kwargs) + super().__init__(mode=mode, uri=uri, description=description, **kwargs) + self.description = description + self.job_input_type: str = "uri_folder" self.mode = mode self.uri = uri - self.job_input_type = 'uri_folder' # type: str - self.description = description -class UriFolderJobOutput(JobOutput, AssetJobOutput): +class UriFolderJobOutput(AssetJobOutput, JobOutput): """UriFolderJobOutput. All required parameters must be populated in order to send to Azure. + :ivar description: Description for the output. + :vartype description: str + :ivar job_output_type: [Required] Specifies the type of job. Required. Known values are: + "uri_file", "uri_folder", "mltable", "custom_model", "mlflow_model", and "triton_model". + :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType :ivar asset_name: Output Asset Name. :vartype asset_name: str :ivar asset_version: Output Asset Version. :vartype asset_version: str :ivar auto_delete_setting: Auto delete setting of output data asset. :vartype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :ivar mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", + :ivar mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and "Direct". :vartype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :ivar uri: Output Asset URI. :vartype uri: str - :ivar description: Description for the output. - :vartype description: str - :ivar job_output_type: Required. [Required] Specifies the type of job.Constant filled by - server. Possible values include: "uri_file", "uri_folder", "mltable", "custom_model", - "mlflow_model", "triton_model". - :vartype job_output_type: str or ~azure.mgmt.machinelearningservices.models.JobOutputType """ _validation = { - 'job_output_type': {'required': True}, + "job_output_type": {"required": True}, } _attribute_map = { - 'asset_name': {'key': 'assetName', 'type': 'str'}, - 'asset_version': {'key': 'assetVersion', 'type': 'str'}, - 'auto_delete_setting': {'key': 'autoDeleteSetting', 'type': 'AutoDeleteSetting'}, - 'mode': {'key': 'mode', 'type': 'str'}, - 'uri': {'key': 'uri', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'job_output_type': {'key': 'jobOutputType', 'type': 'str'}, + "description": {"key": "description", "type": "str"}, + "job_output_type": {"key": "jobOutputType", "type": "str"}, + "asset_name": {"key": "assetName", "type": "str"}, + "asset_version": {"key": "assetVersion", "type": "str"}, + "auto_delete_setting": {"key": "autoDeleteSetting", "type": "AutoDeleteSetting"}, + "mode": {"key": "mode", "type": "str"}, + "uri": {"key": "uri", "type": "str"}, } def __init__( self, *, + description: Optional[str] = None, asset_name: Optional[str] = None, asset_version: Optional[str] = None, - auto_delete_setting: Optional["AutoDeleteSetting"] = None, - mode: Optional[Union[str, "OutputDeliveryMode"]] = None, + auto_delete_setting: Optional["_models.AutoDeleteSetting"] = None, + mode: Optional[Union[str, "_models.OutputDeliveryMode"]] = None, uri: Optional[str] = None, - description: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ + :keyword description: Description for the output. 
+ :paramtype description: str :keyword asset_name: Output Asset Name. :paramtype asset_name: str :keyword asset_version: Output Asset Version. :paramtype asset_version: str :keyword auto_delete_setting: Auto delete setting of output data asset. :paramtype auto_delete_setting: ~azure.mgmt.machinelearningservices.models.AutoDeleteSetting - :keyword mode: Output Asset Delivery Mode. Possible values include: "ReadWriteMount", "Upload", + :keyword mode: Output Asset Delivery Mode. Known values are: "ReadWriteMount", "Upload", and "Direct". :paramtype mode: str or ~azure.mgmt.machinelearningservices.models.OutputDeliveryMode :keyword uri: Output Asset URI. :paramtype uri: str - :keyword description: Description for the output. - :paramtype description: str """ - super(UriFolderJobOutput, self).__init__(description=description, asset_name=asset_name, asset_version=asset_version, auto_delete_setting=auto_delete_setting, mode=mode, uri=uri, **kwargs) + super().__init__( + asset_name=asset_name, + asset_version=asset_version, + auto_delete_setting=auto_delete_setting, + mode=mode, + uri=uri, + description=description, + **kwargs + ) + self.description = description + self.job_output_type: str = "uri_folder" self.asset_name = asset_name self.asset_version = asset_version self.auto_delete_setting = auto_delete_setting self.mode = mode self.uri = uri - self.job_output_type = 'uri_folder' # type: str - self.description = description -class Usage(msrest.serialization.Model): +class Usage(_serialization.Model): """Describes AML Resource Usage. Variables are only populated by the server, and will be ignored when sending a request. @@ -31840,43 +31732,39 @@ class Usage(msrest.serialization.Model): :vartype aml_workspace_location: str :ivar type: Specifies the resource type. :vartype type: str - :ivar unit: An enum describing the unit of usage measurement. Possible values include: "Count". + :ivar unit: An enum describing the unit of usage measurement. "Count" :vartype unit: str or ~azure.mgmt.machinelearningservices.models.UsageUnit :ivar current_value: The current usage of the resource. - :vartype current_value: long + :vartype current_value: int :ivar limit: The maximum permitted usage of the resource. - :vartype limit: long + :vartype limit: int :ivar name: The name of the type of usage. 
:vartype name: ~azure.mgmt.machinelearningservices.models.UsageName """ _validation = { - 'id': {'readonly': True}, - 'aml_workspace_location': {'readonly': True}, - 'type': {'readonly': True}, - 'unit': {'readonly': True}, - 'current_value': {'readonly': True}, - 'limit': {'readonly': True}, - 'name': {'readonly': True}, + "id": {"readonly": True}, + "aml_workspace_location": {"readonly": True}, + "type": {"readonly": True}, + "unit": {"readonly": True}, + "current_value": {"readonly": True}, + "limit": {"readonly": True}, + "name": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'aml_workspace_location': {'key': 'amlWorkspaceLocation', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'unit': {'key': 'unit', 'type': 'str'}, - 'current_value': {'key': 'currentValue', 'type': 'long'}, - 'limit': {'key': 'limit', 'type': 'long'}, - 'name': {'key': 'name', 'type': 'UsageName'}, + "id": {"key": "id", "type": "str"}, + "aml_workspace_location": {"key": "amlWorkspaceLocation", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "unit": {"key": "unit", "type": "str"}, + "current_value": {"key": "currentValue", "type": "int"}, + "limit": {"key": "limit", "type": "int"}, + "name": {"key": "name", "type": "UsageName"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(Usage, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.id = None self.aml_workspace_location = None self.type = None @@ -31886,7 +31774,7 @@ def __init__( self.name = None -class UsageName(msrest.serialization.Model): +class UsageName(_serialization.Model): """The Usage Names. Variables are only populated by the server, and will be ignored when sending a request. @@ -31898,33 +31786,29 @@ class UsageName(msrest.serialization.Model): """ _validation = { - 'value': {'readonly': True}, - 'localized_value': {'readonly': True}, + "value": {"readonly": True}, + "localized_value": {"readonly": True}, } _attribute_map = { - 'value': {'key': 'value', 'type': 'str'}, - 'localized_value': {'key': 'localizedValue', 'type': 'str'}, + "value": {"key": "value", "type": "str"}, + "localized_value": {"key": "localizedValue", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(UsageName, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.value = None self.localized_value = None -class UserAccountCredentials(msrest.serialization.Model): +class UserAccountCredentials(_serialization.Model): """Settings for user account that gets created on each on the nodes of a compute. All required parameters must be populated in order to send to Azure. - :ivar admin_user_name: Required. Name of the administrator user account which can be used to - SSH to nodes. + :ivar admin_user_name: Name of the administrator user account which can be used to SSH to + nodes. Required. :vartype admin_user_name: str :ivar admin_user_ssh_public_key: SSH public key of the administrator user account. 
:vartype admin_user_ssh_public_key: str @@ -31933,13 +31817,13 @@ class UserAccountCredentials(msrest.serialization.Model): """ _validation = { - 'admin_user_name': {'required': True}, + "admin_user_name": {"required": True}, } _attribute_map = { - 'admin_user_name': {'key': 'adminUserName', 'type': 'str'}, - 'admin_user_ssh_public_key': {'key': 'adminUserSshPublicKey', 'type': 'str'}, - 'admin_user_password': {'key': 'adminUserPassword', 'type': 'str'}, + "admin_user_name": {"key": "adminUserName", "type": "str"}, + "admin_user_ssh_public_key": {"key": "adminUserSshPublicKey", "type": "str"}, + "admin_user_password": {"key": "adminUserPassword", "type": "str"}, } def __init__( @@ -31948,24 +31832,24 @@ def __init__( admin_user_name: str, admin_user_ssh_public_key: Optional[str] = None, admin_user_password: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ - :keyword admin_user_name: Required. Name of the administrator user account which can be used to - SSH to nodes. + :keyword admin_user_name: Name of the administrator user account which can be used to SSH to + nodes. Required. :paramtype admin_user_name: str :keyword admin_user_ssh_public_key: SSH public key of the administrator user account. :paramtype admin_user_ssh_public_key: str :keyword admin_user_password: Password of the administrator user account. :paramtype admin_user_password: str """ - super(UserAccountCredentials, self).__init__(**kwargs) + super().__init__(**kwargs) self.admin_user_name = admin_user_name self.admin_user_ssh_public_key = admin_user_ssh_public_key self.admin_user_password = admin_user_password -class UserAssignedIdentity(msrest.serialization.Model): +class UserAssignedIdentity(_serialization.Model): """User assigned identity properties. Variables are only populated by the server, and will be ignored when sending a request. @@ -31977,27 +31861,23 @@ class UserAssignedIdentity(msrest.serialization.Model): """ _validation = { - 'principal_id': {'readonly': True}, - 'client_id': {'readonly': True}, + "principal_id": {"readonly": True}, + "client_id": {"readonly": True}, } _attribute_map = { - 'principal_id': {'key': 'principalId', 'type': 'str'}, - 'client_id': {'key': 'clientId', 'type': 'str'}, + "principal_id": {"key": "principalId", "type": "str"}, + "client_id": {"key": "clientId", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(UserAssignedIdentity, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.principal_id = None self.client_id = None -class UserCreatedAcrAccount(msrest.serialization.Model): +class UserCreatedAcrAccount(_serialization.Model): """UserCreatedAcrAccount. :ivar arm_resource_id: ARM ResourceId of a resource. @@ -32005,24 +31885,19 @@ class UserCreatedAcrAccount(msrest.serialization.Model): """ _attribute_map = { - 'arm_resource_id': {'key': 'armResourceId', 'type': 'ArmResourceId'}, + "arm_resource_id": {"key": "armResourceId", "type": "ArmResourceId"}, } - def __init__( - self, - *, - arm_resource_id: Optional["ArmResourceId"] = None, - **kwargs - ): + def __init__(self, *, arm_resource_id: Optional["_models.ArmResourceId"] = None, **kwargs: Any) -> None: """ :keyword arm_resource_id: ARM ResourceId of a resource. 
:paramtype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId """ - super(UserCreatedAcrAccount, self).__init__(**kwargs) + super().__init__(**kwargs) self.arm_resource_id = arm_resource_id -class UserCreatedStorageAccount(msrest.serialization.Model): +class UserCreatedStorageAccount(_serialization.Model): """UserCreatedStorageAccount. :ivar arm_resource_id: ARM ResourceId of a resource. @@ -32030,20 +31905,15 @@ class UserCreatedStorageAccount(msrest.serialization.Model): """ _attribute_map = { - 'arm_resource_id': {'key': 'armResourceId', 'type': 'ArmResourceId'}, + "arm_resource_id": {"key": "armResourceId", "type": "ArmResourceId"}, } - def __init__( - self, - *, - arm_resource_id: Optional["ArmResourceId"] = None, - **kwargs - ): + def __init__(self, *, arm_resource_id: Optional["_models.ArmResourceId"] = None, **kwargs: Any) -> None: """ :keyword arm_resource_id: ARM ResourceId of a resource. :paramtype arm_resource_id: ~azure.mgmt.machinelearningservices.models.ArmResourceId """ - super(UserCreatedStorageAccount, self).__init__(**kwargs) + super().__init__(**kwargs) self.arm_resource_id = arm_resource_id @@ -32052,48 +31922,51 @@ class UserIdentity(IdentityConfiguration): All required parameters must be populated in order to send to Azure. - :ivar identity_type: Required. [Required] Specifies the type of identity framework.Constant - filled by server. Possible values include: "Managed", "AMLToken", "UserIdentity". + :ivar identity_type: [Required] Specifies the type of identity framework. Required. Known + values are: "Managed", "AMLToken", and "UserIdentity". :vartype identity_type: str or ~azure.mgmt.machinelearningservices.models.IdentityConfigurationType """ _validation = { - 'identity_type': {'required': True}, + "identity_type": {"required": True}, } _attribute_map = { - 'identity_type': {'key': 'identityType', 'type': 'str'}, + "identity_type": {"key": "identityType", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(UserIdentity, self).__init__(**kwargs) - self.identity_type = 'UserIdentity' # type: str + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) + self.identity_type: str = "UserIdentity" class UsernamePasswordAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionPropertiesV2): """UsernamePasswordAuthTypeWorkspaceConnectionProperties. + Variables are only populated by the server, and will be ignored when sending a request. + All required parameters must be populated in order to send to Azure. - :ivar auth_type: Required. Authentication type of the connection target.Constant filled by - server. Possible values include: "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", - "ServicePrincipal", "AccessKey", "ApiKey", "CustomKeys". + :ivar auth_type: Authentication type of the connection target. Required. Known values are: + "PAT", "ManagedIdentity", "UsernamePassword", "None", "SAS", "ServicePrincipal", "AccessKey", + "ApiKey", and "CustomKeys". :vartype auth_type: str or ~azure.mgmt.machinelearningservices.models.ConnectionAuthType - :ivar category: Category of the connection. Possible values include: "PythonFeed", + :ivar category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". 
:vartype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory + :ivar created_by_workspace_arm_id: The arm id of the workspace which created this connection. + :vartype created_by_workspace_arm_id: str :ivar expiry_time: :vartype expiry_time: ~datetime.datetime + :ivar is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :vartype is_shared_to_all: bool :ivar metadata: Any object. - :vartype metadata: any + :vartype metadata: JSON :ivar target: :vartype target: str :ivar credentials: @@ -32102,50 +31975,64 @@ class UsernamePasswordAuthTypeWorkspaceConnectionProperties(WorkspaceConnectionP """ _validation = { - 'auth_type': {'required': True}, + "auth_type": {"required": True}, + "created_by_workspace_arm_id": {"readonly": True}, } _attribute_map = { - 'auth_type': {'key': 'authType', 'type': 'str'}, - 'category': {'key': 'category', 'type': 'str'}, - 'expiry_time': {'key': 'expiryTime', 'type': 'iso-8601'}, - 'metadata': {'key': 'metadata', 'type': 'object'}, - 'target': {'key': 'target', 'type': 'str'}, - 'credentials': {'key': 'credentials', 'type': 'WorkspaceConnectionUsernamePassword'}, + "auth_type": {"key": "authType", "type": "str"}, + "category": {"key": "category", "type": "str"}, + "created_by_workspace_arm_id": {"key": "createdByWorkspaceArmId", "type": "str"}, + "expiry_time": {"key": "expiryTime", "type": "iso-8601"}, + "is_shared_to_all": {"key": "isSharedToAll", "type": "bool"}, + "metadata": {"key": "metadata", "type": "object"}, + "target": {"key": "target", "type": "str"}, + "credentials": {"key": "credentials", "type": "WorkspaceConnectionUsernamePassword"}, } def __init__( self, *, - category: Optional[Union[str, "ConnectionCategory"]] = None, + category: Optional[Union[str, "_models.ConnectionCategory"]] = None, expiry_time: Optional[datetime.datetime] = None, - metadata: Optional[Any] = None, + is_shared_to_all: Optional[bool] = None, + metadata: Optional[JSON] = None, target: Optional[str] = None, - credentials: Optional["WorkspaceConnectionUsernamePassword"] = None, - **kwargs - ): + credentials: Optional["_models.WorkspaceConnectionUsernamePassword"] = None, + **kwargs: Any + ) -> None: """ - :keyword category: Category of the connection. Possible values include: "PythonFeed", + :keyword category: Category of the connection. Known values are: "PythonFeed", "ContainerRegistry", "Git", "S3", "Snowflake", "AzureSqlDb", "AzureSynapseAnalytics", "AzureMySqlDb", "AzurePostgresDb", "ADLSGen2", "Redis", "ApiKey", "AzureOpenAI", - "CognitiveSearch", "CognitiveService", "CustomKeys". + "CognitiveSearch", "CognitiveService", and "CustomKeys". :paramtype category: str or ~azure.mgmt.machinelearningservices.models.ConnectionCategory :keyword expiry_time: :paramtype expiry_time: ~datetime.datetime + :keyword is_shared_to_all: whether this connection will be shared to all the project workspace + under the hub. + :paramtype is_shared_to_all: bool :keyword metadata: Any object. 
- :paramtype metadata: any + :paramtype metadata: JSON :keyword target: :paramtype target: str :keyword credentials: :paramtype credentials: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUsernamePassword """ - super(UsernamePasswordAuthTypeWorkspaceConnectionProperties, self).__init__(category=category, expiry_time=expiry_time, metadata=metadata, target=target, **kwargs) - self.auth_type = 'UsernamePassword' # type: str + super().__init__( + category=category, + expiry_time=expiry_time, + is_shared_to_all=is_shared_to_all, + metadata=metadata, + target=target, + **kwargs + ) + self.auth_type: str = "UsernamePassword" self.credentials = credentials -class VirtualMachineSchema(msrest.serialization.Model): +class VirtualMachineSchema(_serialization.Model): """VirtualMachineSchema. :ivar properties: @@ -32153,25 +32040,20 @@ class VirtualMachineSchema(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'VirtualMachineSchemaProperties'}, + "properties": {"key": "properties", "type": "VirtualMachineSchemaProperties"}, } - def __init__( - self, - *, - properties: Optional["VirtualMachineSchemaProperties"] = None, - **kwargs - ): + def __init__(self, *, properties: Optional["_models.VirtualMachineSchemaProperties"] = None, **kwargs: Any) -> None: """ :keyword properties: :paramtype properties: ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties """ - super(VirtualMachineSchema, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class VirtualMachine(Compute, VirtualMachineSchema): +class VirtualMachine(Compute, VirtualMachineSchema): # pylint: disable=too-many-instance-attributes """A Machine Learning compute based on Azure Virtual Machines. Variables are only populated by the server, and will be ignored when sending a request. @@ -32180,15 +32062,15 @@ class VirtualMachine(Compute, VirtualMachineSchema): :ivar properties: :vartype properties: ~azure.mgmt.machinelearningservices.models.VirtualMachineSchemaProperties - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType :ivar compute_location: Location for the underlying compute. :vartype compute_location: str :ivar provisioning_state: The provision state of the cluster. Valid values are Unknown, - Updating, Provisioning, Succeeded, and Failed. Possible values include: "Unknown", "Updating", - "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + Updating, Provisioning, Succeeded, and Failed. Known values are: "Unknown", "Updating", + "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ProvisioningState :ivar description: The description of the Machine Learning compute. 
@@ -32210,38 +32092,38 @@ class VirtualMachine(Compute, VirtualMachineSchema): """ _validation = { - 'compute_type': {'required': True}, - 'provisioning_state': {'readonly': True}, - 'created_on': {'readonly': True}, - 'modified_on': {'readonly': True}, - 'provisioning_errors': {'readonly': True}, - 'is_attached_compute': {'readonly': True}, + "compute_type": {"required": True}, + "provisioning_state": {"readonly": True}, + "created_on": {"readonly": True}, + "modified_on": {"readonly": True}, + "provisioning_errors": {"readonly": True}, + "is_attached_compute": {"readonly": True}, } _attribute_map = { - 'properties': {'key': 'properties', 'type': 'VirtualMachineSchemaProperties'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, - 'compute_location': {'key': 'computeLocation', 'type': 'str'}, - 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, - 'description': {'key': 'description', 'type': 'str'}, - 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, - 'modified_on': {'key': 'modifiedOn', 'type': 'iso-8601'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, - 'provisioning_errors': {'key': 'provisioningErrors', 'type': '[ErrorResponse]'}, - 'is_attached_compute': {'key': 'isAttachedCompute', 'type': 'bool'}, - 'disable_local_auth': {'key': 'disableLocalAuth', 'type': 'bool'}, + "properties": {"key": "properties", "type": "VirtualMachineSchemaProperties"}, + "compute_type": {"key": "computeType", "type": "str"}, + "compute_location": {"key": "computeLocation", "type": "str"}, + "provisioning_state": {"key": "provisioningState", "type": "str"}, + "description": {"key": "description", "type": "str"}, + "created_on": {"key": "createdOn", "type": "iso-8601"}, + "modified_on": {"key": "modifiedOn", "type": "iso-8601"}, + "resource_id": {"key": "resourceId", "type": "str"}, + "provisioning_errors": {"key": "provisioningErrors", "type": "[ErrorResponse]"}, + "is_attached_compute": {"key": "isAttachedCompute", "type": "bool"}, + "disable_local_auth": {"key": "disableLocalAuth", "type": "bool"}, } def __init__( self, *, - properties: Optional["VirtualMachineSchemaProperties"] = None, + properties: Optional["_models.VirtualMachineSchemaProperties"] = None, compute_location: Optional[str] = None, description: Optional[str] = None, resource_id: Optional[str] = None, disable_local_auth: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword properties: :paramtype properties: @@ -32256,9 +32138,16 @@ def __init__( MSI and AAD exclusively for authentication. :paramtype disable_local_auth: bool """ - super(VirtualMachine, self).__init__(compute_location=compute_location, description=description, resource_id=resource_id, disable_local_auth=disable_local_auth, properties=properties, **kwargs) + super().__init__( + compute_location=compute_location, + description=description, + resource_id=resource_id, + disable_local_auth=disable_local_auth, + properties=properties, + **kwargs + ) self.properties = properties - self.compute_type = 'VirtualMachine' # type: str + self.compute_type: str = "VirtualMachine" self.compute_location = compute_location self.provisioning_state = None self.description = description @@ -32270,38 +32159,33 @@ def __init__( self.disable_local_auth = disable_local_auth -class VirtualMachineImage(msrest.serialization.Model): +class VirtualMachineImage(_serialization.Model): """Virtual Machine image for Windows AML Compute. All required parameters must be populated in order to send to Azure. - :ivar id: Required. 
Virtual Machine image path. + :ivar id: Virtual Machine image path. Required. :vartype id: str """ _validation = { - 'id': {'required': True}, + "id": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, } - def __init__( - self, - *, - id: str, - **kwargs - ): + def __init__(self, *, id: str, **kwargs: Any) -> None: # pylint: disable=redefined-builtin """ - :keyword id: Required. Virtual Machine image path. + :keyword id: Virtual Machine image path. Required. :paramtype id: str """ - super(VirtualMachineImage, self).__init__(**kwargs) + super().__init__(**kwargs) self.id = id -class VirtualMachineSchemaProperties(msrest.serialization.Model): +class VirtualMachineSchemaProperties(_serialization.Model): """VirtualMachineSchemaProperties. :ivar virtual_machine_size: Virtual Machine size. @@ -32321,12 +32205,12 @@ class VirtualMachineSchemaProperties(msrest.serialization.Model): """ _attribute_map = { - 'virtual_machine_size': {'key': 'virtualMachineSize', 'type': 'str'}, - 'ssh_port': {'key': 'sshPort', 'type': 'int'}, - 'notebook_server_port': {'key': 'notebookServerPort', 'type': 'int'}, - 'address': {'key': 'address', 'type': 'str'}, - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, - 'is_notebook_instance_compute': {'key': 'isNotebookInstanceCompute', 'type': 'bool'}, + "virtual_machine_size": {"key": "virtualMachineSize", "type": "str"}, + "ssh_port": {"key": "sshPort", "type": "int"}, + "notebook_server_port": {"key": "notebookServerPort", "type": "int"}, + "address": {"key": "address", "type": "str"}, + "administrator_account": {"key": "administratorAccount", "type": "VirtualMachineSshCredentials"}, + "is_notebook_instance_compute": {"key": "isNotebookInstanceCompute", "type": "bool"}, } def __init__( @@ -32336,10 +32220,10 @@ def __init__( ssh_port: Optional[int] = None, notebook_server_port: Optional[int] = None, address: Optional[str] = None, - administrator_account: Optional["VirtualMachineSshCredentials"] = None, + administrator_account: Optional["_models.VirtualMachineSshCredentials"] = None, is_notebook_instance_compute: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword virtual_machine_size: Virtual Machine size. :paramtype virtual_machine_size: str @@ -32356,7 +32240,7 @@ def __init__( notebooks. :paramtype is_notebook_instance_compute: bool """ - super(VirtualMachineSchemaProperties, self).__init__(**kwargs) + super().__init__(**kwargs) self.virtual_machine_size = virtual_machine_size self.ssh_port = ssh_port self.notebook_server_port = notebook_server_port @@ -32365,7 +32249,7 @@ def __init__( self.is_notebook_instance_compute = is_notebook_instance_compute -class VirtualMachineSecretsSchema(msrest.serialization.Model): +class VirtualMachineSecretsSchema(_serialization.Model): """VirtualMachineSecretsSchema. :ivar administrator_account: Admin credentials for virtual machine. 
@@ -32374,21 +32258,18 @@ class VirtualMachineSecretsSchema(msrest.serialization.Model): """ _attribute_map = { - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, + "administrator_account": {"key": "administratorAccount", "type": "VirtualMachineSshCredentials"}, } def __init__( - self, - *, - administrator_account: Optional["VirtualMachineSshCredentials"] = None, - **kwargs - ): + self, *, administrator_account: Optional["_models.VirtualMachineSshCredentials"] = None, **kwargs: Any + ) -> None: """ :keyword administrator_account: Admin credentials for virtual machine. :paramtype administrator_account: ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials """ - super(VirtualMachineSecretsSchema, self).__init__(**kwargs) + super().__init__(**kwargs) self.administrator_account = administrator_account @@ -32400,38 +32281,35 @@ class VirtualMachineSecrets(ComputeSecrets, VirtualMachineSecretsSchema): :ivar administrator_account: Admin credentials for virtual machine. :vartype administrator_account: ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials - :ivar compute_type: Required. The type of compute.Constant filled by server. Possible values - include: "AKS", "Kubernetes", "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", - "HDInsight", "Databricks", "DataLakeAnalytics", "SynapseSpark". + :ivar compute_type: The type of compute. Required. Known values are: "AKS", "Kubernetes", + "AmlCompute", "ComputeInstance", "DataFactory", "VirtualMachine", "HDInsight", "Databricks", + "DataLakeAnalytics", and "SynapseSpark". :vartype compute_type: str or ~azure.mgmt.machinelearningservices.models.ComputeType """ _validation = { - 'compute_type': {'required': True}, + "compute_type": {"required": True}, } _attribute_map = { - 'administrator_account': {'key': 'administratorAccount', 'type': 'VirtualMachineSshCredentials'}, - 'compute_type': {'key': 'computeType', 'type': 'str'}, + "administrator_account": {"key": "administratorAccount", "type": "VirtualMachineSshCredentials"}, + "compute_type": {"key": "computeType", "type": "str"}, } def __init__( - self, - *, - administrator_account: Optional["VirtualMachineSshCredentials"] = None, - **kwargs - ): + self, *, administrator_account: Optional["_models.VirtualMachineSshCredentials"] = None, **kwargs: Any + ) -> None: """ :keyword administrator_account: Admin credentials for virtual machine. :paramtype administrator_account: ~azure.mgmt.machinelearningservices.models.VirtualMachineSshCredentials """ - super(VirtualMachineSecrets, self).__init__(administrator_account=administrator_account, **kwargs) + super().__init__(administrator_account=administrator_account, **kwargs) self.administrator_account = administrator_account - self.compute_type = 'VirtualMachine' # type: str + self.compute_type: str = "VirtualMachine" -class VirtualMachineSize(msrest.serialization.Model): +class VirtualMachineSize(_serialization.Model): # pylint: disable=too-many-instance-attributes """Describes the properties of a VM size. Variables are only populated by the server, and will be ignored when sending a request. 
@@ -32463,38 +32341,38 @@ class VirtualMachineSize(msrest.serialization.Model): """ _validation = { - 'name': {'readonly': True}, - 'family': {'readonly': True}, - 'v_cp_us': {'readonly': True}, - 'gpus': {'readonly': True}, - 'os_vhd_size_mb': {'readonly': True}, - 'max_resource_volume_mb': {'readonly': True}, - 'memory_gb': {'readonly': True}, - 'low_priority_capable': {'readonly': True}, - 'premium_io': {'readonly': True}, + "name": {"readonly": True}, + "family": {"readonly": True}, + "v_cp_us": {"readonly": True}, + "gpus": {"readonly": True}, + "os_vhd_size_mb": {"readonly": True}, + "max_resource_volume_mb": {"readonly": True}, + "memory_gb": {"readonly": True}, + "low_priority_capable": {"readonly": True}, + "premium_io": {"readonly": True}, } _attribute_map = { - 'name': {'key': 'name', 'type': 'str'}, - 'family': {'key': 'family', 'type': 'str'}, - 'v_cp_us': {'key': 'vCPUs', 'type': 'int'}, - 'gpus': {'key': 'gpus', 'type': 'int'}, - 'os_vhd_size_mb': {'key': 'osVhdSizeMB', 'type': 'int'}, - 'max_resource_volume_mb': {'key': 'maxResourceVolumeMB', 'type': 'int'}, - 'memory_gb': {'key': 'memoryGB', 'type': 'float'}, - 'low_priority_capable': {'key': 'lowPriorityCapable', 'type': 'bool'}, - 'premium_io': {'key': 'premiumIO', 'type': 'bool'}, - 'estimated_vm_prices': {'key': 'estimatedVMPrices', 'type': 'EstimatedVMPrices'}, - 'supported_compute_types': {'key': 'supportedComputeTypes', 'type': '[str]'}, + "name": {"key": "name", "type": "str"}, + "family": {"key": "family", "type": "str"}, + "v_cp_us": {"key": "vCPUs", "type": "int"}, + "gpus": {"key": "gpus", "type": "int"}, + "os_vhd_size_mb": {"key": "osVhdSizeMB", "type": "int"}, + "max_resource_volume_mb": {"key": "maxResourceVolumeMB", "type": "int"}, + "memory_gb": {"key": "memoryGB", "type": "float"}, + "low_priority_capable": {"key": "lowPriorityCapable", "type": "bool"}, + "premium_io": {"key": "premiumIO", "type": "bool"}, + "estimated_vm_prices": {"key": "estimatedVMPrices", "type": "EstimatedVMPrices"}, + "supported_compute_types": {"key": "supportedComputeTypes", "type": "[str]"}, } def __init__( self, *, - estimated_vm_prices: Optional["EstimatedVMPrices"] = None, + estimated_vm_prices: Optional["_models.EstimatedVMPrices"] = None, supported_compute_types: Optional[List[str]] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword estimated_vm_prices: The estimated price information for using a VM. :paramtype estimated_vm_prices: ~azure.mgmt.machinelearningservices.models.EstimatedVMPrices @@ -32502,7 +32380,7 @@ def __init__( size. :paramtype supported_compute_types: list[str] """ - super(VirtualMachineSize, self).__init__(**kwargs) + super().__init__(**kwargs) self.name = None self.family = None self.v_cp_us = None @@ -32516,7 +32394,7 @@ def __init__( self.supported_compute_types = supported_compute_types -class VirtualMachineSizeListResult(msrest.serialization.Model): +class VirtualMachineSizeListResult(_serialization.Model): """The List Virtual Machine size operation response. :ivar value: The list of virtual machine sizes supported by AmlCompute. 
@@ -32524,24 +32402,19 @@ class VirtualMachineSizeListResult(msrest.serialization.Model): """ _attribute_map = { - 'value': {'key': 'value', 'type': '[VirtualMachineSize]'}, + "value": {"key": "value", "type": "[VirtualMachineSize]"}, } - def __init__( - self, - *, - value: Optional[List["VirtualMachineSize"]] = None, - **kwargs - ): + def __init__(self, *, value: Optional[List["_models.VirtualMachineSize"]] = None, **kwargs: Any) -> None: """ :keyword value: The list of virtual machine sizes supported by AmlCompute. :paramtype value: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] """ - super(VirtualMachineSizeListResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.value = value -class VirtualMachineSshCredentials(msrest.serialization.Model): +class VirtualMachineSshCredentials(_serialization.Model): """Admin credentials for virtual machine. :ivar username: Username of admin account. @@ -32555,10 +32428,10 @@ class VirtualMachineSshCredentials(msrest.serialization.Model): """ _attribute_map = { - 'username': {'key': 'username', 'type': 'str'}, - 'password': {'key': 'password', 'type': 'str'}, - 'public_key_data': {'key': 'publicKeyData', 'type': 'str'}, - 'private_key_data': {'key': 'privateKeyData', 'type': 'str'}, + "username": {"key": "username", "type": "str"}, + "password": {"key": "password", "type": "str"}, + "public_key_data": {"key": "publicKeyData", "type": "str"}, + "private_key_data": {"key": "privateKeyData", "type": "str"}, } def __init__( @@ -32568,8 +32441,8 @@ def __init__( password: Optional[str] = None, public_key_data: Optional[str] = None, private_key_data: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword username: Username of admin account. :paramtype username: str @@ -32580,18 +32453,18 @@ def __init__( :keyword private_key_data: Private key data. :paramtype private_key_data: str """ - super(VirtualMachineSshCredentials, self).__init__(**kwargs) + super().__init__(**kwargs) self.username = username self.password = password self.public_key_data = public_key_data self.private_key_data = private_key_data -class VolumeDefinition(msrest.serialization.Model): +class VolumeDefinition(_serialization.Model): """VolumeDefinition. - :ivar type: Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe. Possible - values include: "bind", "volume", "tmpfs", "npipe". Default value: "bind". + :ivar type: Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe. Known values + are: "bind", "volume", "tmpfs", and "npipe". :vartype type: str or ~azure.mgmt.machinelearningservices.models.VolumeDefinitionType :ivar read_only: Indicate whether to mount volume as readOnly. Default value for this is false. 
:vartype read_only: bool @@ -32610,32 +32483,32 @@ class VolumeDefinition(msrest.serialization.Model): """ _attribute_map = { - 'type': {'key': 'type', 'type': 'str'}, - 'read_only': {'key': 'readOnly', 'type': 'bool'}, - 'source': {'key': 'source', 'type': 'str'}, - 'target': {'key': 'target', 'type': 'str'}, - 'consistency': {'key': 'consistency', 'type': 'str'}, - 'bind': {'key': 'bind', 'type': 'BindOptions'}, - 'volume': {'key': 'volume', 'type': 'VolumeOptions'}, - 'tmpfs': {'key': 'tmpfs', 'type': 'TmpfsOptions'}, + "type": {"key": "type", "type": "str"}, + "read_only": {"key": "readOnly", "type": "bool"}, + "source": {"key": "source", "type": "str"}, + "target": {"key": "target", "type": "str"}, + "consistency": {"key": "consistency", "type": "str"}, + "bind": {"key": "bind", "type": "BindOptions"}, + "volume": {"key": "volume", "type": "VolumeOptions"}, + "tmpfs": {"key": "tmpfs", "type": "TmpfsOptions"}, } def __init__( self, *, - type: Optional[Union[str, "VolumeDefinitionType"]] = "bind", + type: Union[str, "_models.VolumeDefinitionType"] = "bind", read_only: Optional[bool] = None, source: Optional[str] = None, target: Optional[str] = None, consistency: Optional[str] = None, - bind: Optional["BindOptions"] = None, - volume: Optional["VolumeOptions"] = None, - tmpfs: Optional["TmpfsOptions"] = None, - **kwargs - ): - """ - :keyword type: Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe. Possible - values include: "bind", "volume", "tmpfs", "npipe". Default value: "bind". + bind: Optional["_models.BindOptions"] = None, + volume: Optional["_models.VolumeOptions"] = None, + tmpfs: Optional["_models.TmpfsOptions"] = None, + **kwargs: Any + ) -> None: + """ + :keyword type: Type of Volume Definition. Possible Values: bind,volume,tmpfs,npipe. Known + values are: "bind", "volume", "tmpfs", and "npipe". :paramtype type: str or ~azure.mgmt.machinelearningservices.models.VolumeDefinitionType :keyword read_only: Indicate whether to mount volume as readOnly. Default value for this is false. @@ -32653,7 +32526,7 @@ def __init__( :keyword tmpfs: tmpfs option of the mount. :paramtype tmpfs: ~azure.mgmt.machinelearningservices.models.TmpfsOptions """ - super(VolumeDefinition, self).__init__(**kwargs) + super().__init__(**kwargs) self.type = type self.read_only = read_only self.source = source @@ -32664,7 +32537,7 @@ def __init__( self.tmpfs = tmpfs -class VolumeOptions(msrest.serialization.Model): +class VolumeOptions(_serialization.Model): """VolumeOptions. :ivar nocopy: Indicate whether volume is nocopy. @@ -32672,24 +32545,19 @@ class VolumeOptions(msrest.serialization.Model): """ _attribute_map = { - 'nocopy': {'key': 'nocopy', 'type': 'bool'}, + "nocopy": {"key": "nocopy", "type": "bool"}, } - def __init__( - self, - *, - nocopy: Optional[bool] = None, - **kwargs - ): + def __init__(self, *, nocopy: Optional[bool] = None, **kwargs: Any) -> None: """ :keyword nocopy: Indicate whether volume is nocopy. :paramtype nocopy: bool """ - super(VolumeOptions, self).__init__(**kwargs) + super().__init__(**kwargs) self.nocopy = nocopy -class Workspace(Resource): +class Workspace(Resource): # pylint: disable=too-many-instance-attributes """An object that represents a machine learning workspace. Variables are only populated by the server, and will be ignored when sending a request. @@ -32714,7 +32582,7 @@ class Workspace(Resource): :ivar sku: Optional. This field is required to be implemented by the RP because AML is supporting more than one tier. 
:vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar tags: A set of tags. Dictionary of :code:``. + :ivar tags: Dictionary of :code:``. :vartype tags: dict[str, str] :ivar allow_public_access_when_behind_vnet: The flag to indicate whether to allow public access when behind VNet. @@ -32771,14 +32639,17 @@ class Workspace(Resource): :ivar private_link_count: Count of private connections in the workspace. :vartype private_link_count: int :ivar provisioning_state: The current deployment state of workspace resource. The - provisioningState is to indicate states for resource provisioning. Possible values include: - "Unknown", "Updating", "Creating", "Deleting", "Succeeded", "Failed", "Canceled". + provisioningState is to indicate states for resource provisioning. Known values are: "Unknown", + "Updating", "Creating", "Deleting", "Succeeded", "Failed", and "Canceled". :vartype provisioning_state: str or ~azure.mgmt.machinelearningservices.models.ProvisioningState - :ivar public_network_access: Whether requests from Public Network are allowed. Possible values - include: "Enabled", "Disabled". + :ivar public_network_access: Whether requests from Public Network are allowed. Known values + are: "Enabled" and "Disabled". :vartype public_network_access: str or ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType + :ivar serverless_compute_settings: Settings for serverless compute created in the workspace. + :vartype serverless_compute_settings: + ~azure.mgmt.machinelearningservices.models.ServerlessComputeSettings :ivar service_managed_resources_settings: The service managed resource settings. :vartype service_managed_resources_settings: ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings @@ -32814,77 +32685,90 @@ class Workspace(Resource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'ml_flow_tracking_uri': {'readonly': True}, - 'notebook_info': {'readonly': True}, - 'private_endpoint_connections': {'readonly': True}, - 'private_link_count': {'readonly': True}, - 'provisioning_state': {'readonly': True}, - 'service_provisioned_resource_group': {'readonly': True}, - 'storage_hns_enabled': {'readonly': True}, - 'tenant_id': {'readonly': True}, - 'workspace_id': {'readonly': True}, - } - - _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'kind': {'key': 'kind', 'type': 'str'}, - 'location': {'key': 'location', 'type': 'str'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'allow_public_access_when_behind_vnet': {'key': 'properties.allowPublicAccessWhenBehindVnet', 'type': 'bool'}, - 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'}, - 'associated_workspaces': {'key': 'properties.associatedWorkspaces', 'type': '[str]'}, - 'container_registries': {'key': 'properties.containerRegistries', 'type': '[str]'}, - 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'discovery_url': {'key': 'properties.discoveryUrl', 'type': 'str'}, - 'enable_data_isolation': {'key': 'properties.enableDataIsolation', 'type': 'bool'}, - 'encryption': {'key': 'properties.encryption', 'type': 
'EncryptionProperty'}, - 'existing_workspaces': {'key': 'properties.existingWorkspaces', 'type': '[str]'}, - 'feature_store_settings': {'key': 'properties.featureStoreSettings', 'type': 'FeatureStoreSettings'}, - 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, - 'hbi_workspace': {'key': 'properties.hbiWorkspace', 'type': 'bool'}, - 'hub_resource_id': {'key': 'properties.hubResourceId', 'type': 'str'}, - 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, - 'key_vault': {'key': 'properties.keyVault', 'type': 'str'}, - 'key_vaults': {'key': 'properties.keyVaults', 'type': '[str]'}, - 'managed_network': {'key': 'properties.managedNetwork', 'type': 'ManagedNetworkSettings'}, - 'ml_flow_tracking_uri': {'key': 'properties.mlFlowTrackingUri', 'type': 'str'}, - 'notebook_info': {'key': 'properties.notebookInfo', 'type': 'NotebookResourceInfo'}, - 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'}, - 'private_endpoint_connections': {'key': 'properties.privateEndpointConnections', 'type': '[PrivateEndpointConnection]'}, - 'private_link_count': {'key': 'properties.privateLinkCount', 'type': 'int'}, - 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, - 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, - 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'}, - 'service_provisioned_resource_group': {'key': 'properties.serviceProvisionedResourceGroup', 'type': 'str'}, - 'shared_private_link_resources': {'key': 'properties.sharedPrivateLinkResources', 'type': '[SharedPrivateLinkResource]'}, - 'soft_delete_retention_in_days': {'key': 'properties.softDeleteRetentionInDays', 'type': 'int'}, - 'storage_account': {'key': 'properties.storageAccount', 'type': 'str'}, - 'storage_accounts': {'key': 'properties.storageAccounts', 'type': '[str]'}, - 'storage_hns_enabled': {'key': 'properties.storageHnsEnabled', 'type': 'bool'}, - 'system_datastores_auth_mode': {'key': 'properties.systemDatastoresAuthMode', 'type': 'str'}, - 'tenant_id': {'key': 'properties.tenantId', 'type': 'str'}, - 'v1_legacy_mode': {'key': 'properties.v1LegacyMode', 'type': 'bool'}, - 'workspace_hub_config': {'key': 'properties.workspaceHubConfig', 'type': 'WorkspaceHubConfig'}, - 'workspace_id': {'key': 'properties.workspaceId', 'type': 'str'}, - } - - def __init__( - self, - *, - identity: Optional["ManagedServiceIdentity"] = None, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "ml_flow_tracking_uri": {"readonly": True}, + "notebook_info": {"readonly": True}, + "private_endpoint_connections": {"readonly": True}, + "private_link_count": {"readonly": True}, + "provisioning_state": {"readonly": True}, + "service_provisioned_resource_group": {"readonly": True}, + "storage_hns_enabled": {"readonly": True}, + "tenant_id": {"readonly": True}, + "workspace_id": {"readonly": True}, + } + + _attribute_map = { + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "kind": {"key": "kind", "type": "str"}, + "location": {"key": "location", "type": "str"}, + "sku": {"key": "sku", "type": "Sku"}, + "tags": {"key": "tags", "type": "{str}"}, + 
"allow_public_access_when_behind_vnet": {"key": "properties.allowPublicAccessWhenBehindVnet", "type": "bool"}, + "application_insights": {"key": "properties.applicationInsights", "type": "str"}, + "associated_workspaces": {"key": "properties.associatedWorkspaces", "type": "[str]"}, + "container_registries": {"key": "properties.containerRegistries", "type": "[str]"}, + "container_registry": {"key": "properties.containerRegistry", "type": "str"}, + "description": {"key": "properties.description", "type": "str"}, + "discovery_url": {"key": "properties.discoveryUrl", "type": "str"}, + "enable_data_isolation": {"key": "properties.enableDataIsolation", "type": "bool"}, + "encryption": {"key": "properties.encryption", "type": "EncryptionProperty"}, + "existing_workspaces": {"key": "properties.existingWorkspaces", "type": "[str]"}, + "feature_store_settings": {"key": "properties.featureStoreSettings", "type": "FeatureStoreSettings"}, + "friendly_name": {"key": "properties.friendlyName", "type": "str"}, + "hbi_workspace": {"key": "properties.hbiWorkspace", "type": "bool"}, + "hub_resource_id": {"key": "properties.hubResourceId", "type": "str"}, + "image_build_compute": {"key": "properties.imageBuildCompute", "type": "str"}, + "key_vault": {"key": "properties.keyVault", "type": "str"}, + "key_vaults": {"key": "properties.keyVaults", "type": "[str]"}, + "managed_network": {"key": "properties.managedNetwork", "type": "ManagedNetworkSettings"}, + "ml_flow_tracking_uri": {"key": "properties.mlFlowTrackingUri", "type": "str"}, + "notebook_info": {"key": "properties.notebookInfo", "type": "NotebookResourceInfo"}, + "primary_user_assigned_identity": {"key": "properties.primaryUserAssignedIdentity", "type": "str"}, + "private_endpoint_connections": { + "key": "properties.privateEndpointConnections", + "type": "[PrivateEndpointConnection]", + }, + "private_link_count": {"key": "properties.privateLinkCount", "type": "int"}, + "provisioning_state": {"key": "properties.provisioningState", "type": "str"}, + "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, + "serverless_compute_settings": { + "key": "properties.serverlessComputeSettings", + "type": "ServerlessComputeSettings", + }, + "service_managed_resources_settings": { + "key": "properties.serviceManagedResourcesSettings", + "type": "ServiceManagedResourcesSettings", + }, + "service_provisioned_resource_group": {"key": "properties.serviceProvisionedResourceGroup", "type": "str"}, + "shared_private_link_resources": { + "key": "properties.sharedPrivateLinkResources", + "type": "[SharedPrivateLinkResource]", + }, + "soft_delete_retention_in_days": {"key": "properties.softDeleteRetentionInDays", "type": "int"}, + "storage_account": {"key": "properties.storageAccount", "type": "str"}, + "storage_accounts": {"key": "properties.storageAccounts", "type": "[str]"}, + "storage_hns_enabled": {"key": "properties.storageHnsEnabled", "type": "bool"}, + "system_datastores_auth_mode": {"key": "properties.systemDatastoresAuthMode", "type": "str"}, + "tenant_id": {"key": "properties.tenantId", "type": "str"}, + "v1_legacy_mode": {"key": "properties.v1LegacyMode", "type": "bool"}, + "workspace_hub_config": {"key": "properties.workspaceHubConfig", "type": "WorkspaceHubConfig"}, + "workspace_id": {"key": "properties.workspaceId", "type": "str"}, + } + + def __init__( # pylint: disable=too-many-locals + self, + *, + identity: Optional["_models.ManagedServiceIdentity"] = None, kind: Optional[str] = None, location: Optional[str] = None, - sku: 
Optional["Sku"] = None, + sku: Optional["_models.Sku"] = None, tags: Optional[Dict[str, str]] = None, allow_public_access_when_behind_vnet: Optional[bool] = None, application_insights: Optional[str] = None, @@ -32894,28 +32778,29 @@ def __init__( description: Optional[str] = None, discovery_url: Optional[str] = None, enable_data_isolation: Optional[bool] = None, - encryption: Optional["EncryptionProperty"] = None, + encryption: Optional["_models.EncryptionProperty"] = None, existing_workspaces: Optional[List[str]] = None, - feature_store_settings: Optional["FeatureStoreSettings"] = None, + feature_store_settings: Optional["_models.FeatureStoreSettings"] = None, friendly_name: Optional[str] = None, hbi_workspace: Optional[bool] = None, hub_resource_id: Optional[str] = None, image_build_compute: Optional[str] = None, key_vault: Optional[str] = None, key_vaults: Optional[List[str]] = None, - managed_network: Optional["ManagedNetworkSettings"] = None, + managed_network: Optional["_models.ManagedNetworkSettings"] = None, primary_user_assigned_identity: Optional[str] = None, - public_network_access: Optional[Union[str, "PublicNetworkAccessType"]] = None, - service_managed_resources_settings: Optional["ServiceManagedResourcesSettings"] = None, - shared_private_link_resources: Optional[List["SharedPrivateLinkResource"]] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccessType"]] = None, + serverless_compute_settings: Optional["_models.ServerlessComputeSettings"] = None, + service_managed_resources_settings: Optional["_models.ServiceManagedResourcesSettings"] = None, + shared_private_link_resources: Optional[List["_models.SharedPrivateLinkResource"]] = None, soft_delete_retention_in_days: Optional[int] = None, storage_account: Optional[str] = None, storage_accounts: Optional[List[str]] = None, system_datastores_auth_mode: Optional[str] = None, v1_legacy_mode: Optional[bool] = None, - workspace_hub_config: Optional["WorkspaceHubConfig"] = None, - **kwargs - ): + workspace_hub_config: Optional["_models.WorkspaceHubConfig"] = None, + **kwargs: Any + ) -> None: """ :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity @@ -32926,7 +32811,7 @@ def __init__( :keyword sku: Optional. This field is required to be implemented by the RP because AML is supporting more than one tier. :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - :keyword tags: A set of tags. Dictionary of :code:``. + :keyword tags: Dictionary of :code:``. :paramtype tags: dict[str, str] :keyword allow_public_access_when_behind_vnet: The flag to indicate whether to allow public access when behind VNet. @@ -32973,10 +32858,13 @@ def __init__( :keyword primary_user_assigned_identity: The user assigned identity resource id that represents the workspace identity. :paramtype primary_user_assigned_identity: str - :keyword public_network_access: Whether requests from Public Network are allowed. Possible - values include: "Enabled", "Disabled". + :keyword public_network_access: Whether requests from Public Network are allowed. Known values + are: "Enabled" and "Disabled". :paramtype public_network_access: str or ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType + :keyword serverless_compute_settings: Settings for serverless compute created in the workspace. 
+ :paramtype serverless_compute_settings: + ~azure.mgmt.machinelearningservices.models.ServerlessComputeSettings :keyword service_managed_resources_settings: The service managed resource settings. :paramtype service_managed_resources_settings: ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings @@ -33001,7 +32889,7 @@ def __init__( :keyword workspace_hub_config: WorkspaceHub's configuration object. :paramtype workspace_hub_config: ~azure.mgmt.machinelearningservices.models.WorkspaceHubConfig """ - super(Workspace, self).__init__(**kwargs) + super().__init__(**kwargs) self.identity = identity self.kind = kind self.location = location @@ -33032,6 +32920,7 @@ def __init__( self.private_link_count = None self.provisioning_state = None self.public_network_access = public_network_access + self.serverless_compute_settings = serverless_compute_settings self.service_managed_resources_settings = service_managed_resources_settings self.service_provisioned_resource_group = None self.shared_private_link_resources = shared_private_link_resources @@ -33046,7 +32935,7 @@ def __init__( self.workspace_id = None -class WorkspaceConnectionAccessKey(msrest.serialization.Model): +class WorkspaceConnectionAccessKey(_serialization.Model): """WorkspaceConnectionAccessKey. :ivar access_key_id: @@ -33056,29 +32945,25 @@ class WorkspaceConnectionAccessKey(msrest.serialization.Model): """ _attribute_map = { - 'access_key_id': {'key': 'accessKeyId', 'type': 'str'}, - 'secret_access_key': {'key': 'secretAccessKey', 'type': 'str'}, + "access_key_id": {"key": "accessKeyId", "type": "str"}, + "secret_access_key": {"key": "secretAccessKey", "type": "str"}, } def __init__( - self, - *, - access_key_id: Optional[str] = None, - secret_access_key: Optional[str] = None, - **kwargs - ): + self, *, access_key_id: Optional[str] = None, secret_access_key: Optional[str] = None, **kwargs: Any + ) -> None: """ :keyword access_key_id: :paramtype access_key_id: str :keyword secret_access_key: :paramtype secret_access_key: str """ - super(WorkspaceConnectionAccessKey, self).__init__(**kwargs) + super().__init__(**kwargs) self.access_key_id = access_key_id self.secret_access_key = secret_access_key -class WorkspaceConnectionApiKey(msrest.serialization.Model): +class WorkspaceConnectionApiKey(_serialization.Model): """Api key object for workspace connection credential. :ivar key: @@ -33086,24 +32971,19 @@ class WorkspaceConnectionApiKey(msrest.serialization.Model): """ _attribute_map = { - 'key': {'key': 'key', 'type': 'str'}, + "key": {"key": "key", "type": "str"}, } - def __init__( - self, - *, - key: Optional[str] = None, - **kwargs - ): + def __init__(self, *, key: Optional[str] = None, **kwargs: Any) -> None: """ :keyword key: :paramtype key: str """ - super(WorkspaceConnectionApiKey, self).__init__(**kwargs) + super().__init__(**kwargs) self.key = key -class WorkspaceConnectionManagedIdentity(msrest.serialization.Model): +class WorkspaceConnectionManagedIdentity(_serialization.Model): """WorkspaceConnectionManagedIdentity. 
:ivar client_id: @@ -33113,29 +32993,23 @@ class WorkspaceConnectionManagedIdentity(msrest.serialization.Model): """ _attribute_map = { - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'resource_id': {'key': 'resourceId', 'type': 'str'}, + "client_id": {"key": "clientId", "type": "str"}, + "resource_id": {"key": "resourceId", "type": "str"}, } - def __init__( - self, - *, - client_id: Optional[str] = None, - resource_id: Optional[str] = None, - **kwargs - ): + def __init__(self, *, client_id: Optional[str] = None, resource_id: Optional[str] = None, **kwargs: Any) -> None: """ :keyword client_id: :paramtype client_id: str :keyword resource_id: :paramtype resource_id: str """ - super(WorkspaceConnectionManagedIdentity, self).__init__(**kwargs) + super().__init__(**kwargs) self.client_id = client_id self.resource_id = resource_id -class WorkspaceConnectionPersonalAccessToken(msrest.serialization.Model): +class WorkspaceConnectionPersonalAccessToken(_serialization.Model): """WorkspaceConnectionPersonalAccessToken. :ivar pat: @@ -33143,20 +33017,15 @@ class WorkspaceConnectionPersonalAccessToken(msrest.serialization.Model): """ _attribute_map = { - 'pat': {'key': 'pat', 'type': 'str'}, + "pat": {"key": "pat", "type": "str"}, } - def __init__( - self, - *, - pat: Optional[str] = None, - **kwargs - ): + def __init__(self, *, pat: Optional[str] = None, **kwargs: Any) -> None: """ :keyword pat: :paramtype pat: str """ - super(WorkspaceConnectionPersonalAccessToken, self).__init__(**kwargs) + super().__init__(**kwargs) self.pat = pat @@ -33183,37 +33052,32 @@ class WorkspaceConnectionPropertiesV2BasicResource(Resource): """ _validation = { - 'id': {'readonly': True}, - 'name': {'readonly': True}, - 'type': {'readonly': True}, - 'system_data': {'readonly': True}, - 'properties': {'required': True}, + "id": {"readonly": True}, + "name": {"readonly": True}, + "type": {"readonly": True}, + "system_data": {"readonly": True}, + "properties": {"required": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'name': {'key': 'name', 'type': 'str'}, - 'type': {'key': 'type', 'type': 'str'}, - 'system_data': {'key': 'systemData', 'type': 'SystemData'}, - 'properties': {'key': 'properties', 'type': 'WorkspaceConnectionPropertiesV2'}, + "id": {"key": "id", "type": "str"}, + "name": {"key": "name", "type": "str"}, + "type": {"key": "type", "type": "str"}, + "system_data": {"key": "systemData", "type": "SystemData"}, + "properties": {"key": "properties", "type": "WorkspaceConnectionPropertiesV2"}, } - def __init__( - self, - *, - properties: "WorkspaceConnectionPropertiesV2", - **kwargs - ): + def __init__(self, *, properties: "_models.WorkspaceConnectionPropertiesV2", **kwargs: Any) -> None: """ :keyword properties: Required. :paramtype properties: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2 """ - super(WorkspaceConnectionPropertiesV2BasicResource, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult(msrest.serialization.Model): +class WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult(_serialization.Model): """WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult. 
:ivar next_link: @@ -33224,17 +33088,17 @@ class WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult(msrest.seri """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[WorkspaceConnectionPropertiesV2BasicResource]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[WorkspaceConnectionPropertiesV2BasicResource]"}, } def __init__( self, *, next_link: Optional[str] = None, - value: Optional[List["WorkspaceConnectionPropertiesV2BasicResource"]] = None, - **kwargs - ): + value: Optional[List["_models.WorkspaceConnectionPropertiesV2BasicResource"]] = None, + **kwargs: Any + ) -> None: """ :keyword next_link: :paramtype next_link: str @@ -33242,12 +33106,12 @@ def __init__( :paramtype value: list[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] """ - super(WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class WorkspaceConnectionServicePrincipal(msrest.serialization.Model): +class WorkspaceConnectionServicePrincipal(_serialization.Model): """WorkspaceConnectionServicePrincipal. :ivar client_id: @@ -33259,9 +33123,9 @@ class WorkspaceConnectionServicePrincipal(msrest.serialization.Model): """ _attribute_map = { - 'client_id': {'key': 'clientId', 'type': 'str'}, - 'client_secret': {'key': 'clientSecret', 'type': 'str'}, - 'tenant_id': {'key': 'tenantId', 'type': 'str'}, + "client_id": {"key": "clientId", "type": "str"}, + "client_secret": {"key": "clientSecret", "type": "str"}, + "tenant_id": {"key": "tenantId", "type": "str"}, } def __init__( @@ -33270,8 +33134,8 @@ def __init__( client_id: Optional[str] = None, client_secret: Optional[str] = None, tenant_id: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword client_id: :paramtype client_id: str @@ -33280,13 +33144,13 @@ def __init__( :keyword tenant_id: :paramtype tenant_id: str """ - super(WorkspaceConnectionServicePrincipal, self).__init__(**kwargs) + super().__init__(**kwargs) self.client_id = client_id self.client_secret = client_secret self.tenant_id = tenant_id -class WorkspaceConnectionSharedAccessSignature(msrest.serialization.Model): +class WorkspaceConnectionSharedAccessSignature(_serialization.Model): """WorkspaceConnectionSharedAccessSignature. :ivar sas: @@ -33294,24 +33158,19 @@ class WorkspaceConnectionSharedAccessSignature(msrest.serialization.Model): """ _attribute_map = { - 'sas': {'key': 'sas', 'type': 'str'}, + "sas": {"key": "sas", "type": "str"}, } - def __init__( - self, - *, - sas: Optional[str] = None, - **kwargs - ): + def __init__(self, *, sas: Optional[str] = None, **kwargs: Any) -> None: """ :keyword sas: :paramtype sas: str """ - super(WorkspaceConnectionSharedAccessSignature, self).__init__(**kwargs) + super().__init__(**kwargs) self.sas = sas -class WorkspaceConnectionUpdateParameter(msrest.serialization.Model): +class WorkspaceConnectionUpdateParameter(_serialization.Model): """The properties that the machine learning workspace connection will be updated with. 
:ivar properties: The properties that the machine learning workspace connection will be updated @@ -33320,26 +33179,23 @@ class WorkspaceConnectionUpdateParameter(msrest.serialization.Model): """ _attribute_map = { - 'properties': {'key': 'properties', 'type': 'WorkspaceConnectionPropertiesV2'}, + "properties": {"key": "properties", "type": "WorkspaceConnectionPropertiesV2"}, } def __init__( - self, - *, - properties: Optional["WorkspaceConnectionPropertiesV2"] = None, - **kwargs - ): + self, *, properties: Optional["_models.WorkspaceConnectionPropertiesV2"] = None, **kwargs: Any + ) -> None: """ :keyword properties: The properties that the machine learning workspace connection will be updated with. :paramtype properties: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2 """ - super(WorkspaceConnectionUpdateParameter, self).__init__(**kwargs) + super().__init__(**kwargs) self.properties = properties -class WorkspaceConnectionUsernamePassword(msrest.serialization.Model): +class WorkspaceConnectionUsernamePassword(_serialization.Model): """WorkspaceConnectionUsernamePassword. :ivar password: @@ -33349,29 +33205,23 @@ class WorkspaceConnectionUsernamePassword(msrest.serialization.Model): """ _attribute_map = { - 'password': {'key': 'password', 'type': 'str'}, - 'username': {'key': 'username', 'type': 'str'}, + "password": {"key": "password", "type": "str"}, + "username": {"key": "username", "type": "str"}, } - def __init__( - self, - *, - password: Optional[str] = None, - username: Optional[str] = None, - **kwargs - ): + def __init__(self, *, password: Optional[str] = None, username: Optional[str] = None, **kwargs: Any) -> None: """ :keyword password: :paramtype password: str :keyword username: :paramtype username: str """ - super(WorkspaceConnectionUsernamePassword, self).__init__(**kwargs) + super().__init__(**kwargs) self.password = password self.username = username -class WorkspaceHubConfig(msrest.serialization.Model): +class WorkspaceHubConfig(_serialization.Model): """WorkspaceHub's configuration object. :ivar additional_workspace_storage_accounts: @@ -33381,8 +33231,8 @@ class WorkspaceHubConfig(msrest.serialization.Model): """ _attribute_map = { - 'additional_workspace_storage_accounts': {'key': 'additionalWorkspaceStorageAccounts', 'type': '[str]'}, - 'default_workspace_resource_group': {'key': 'defaultWorkspaceResourceGroup', 'type': 'str'}, + "additional_workspace_storage_accounts": {"key": "additionalWorkspaceStorageAccounts", "type": "[str]"}, + "default_workspace_resource_group": {"key": "defaultWorkspaceResourceGroup", "type": "str"}, } def __init__( @@ -33390,20 +33240,20 @@ def __init__( *, additional_workspace_storage_accounts: Optional[List[str]] = None, default_workspace_resource_group: Optional[str] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword additional_workspace_storage_accounts: :paramtype additional_workspace_storage_accounts: list[str] :keyword default_workspace_resource_group: :paramtype default_workspace_resource_group: str """ - super(WorkspaceHubConfig, self).__init__(**kwargs) + super().__init__(**kwargs) self.additional_workspace_storage_accounts = additional_workspace_storage_accounts self.default_workspace_resource_group = default_workspace_resource_group -class WorkspaceListResult(msrest.serialization.Model): +class WorkspaceListResult(_serialization.Model): """The result of a request to list machine learning workspaces. :ivar next_link: The link to the next page constructed using the continuationToken. 
If null, @@ -33415,17 +33265,13 @@ class WorkspaceListResult(msrest.serialization.Model): """ _attribute_map = { - 'next_link': {'key': 'nextLink', 'type': 'str'}, - 'value': {'key': 'value', 'type': '[Workspace]'}, + "next_link": {"key": "nextLink", "type": "str"}, + "value": {"key": "value", "type": "[Workspace]"}, } def __init__( - self, - *, - next_link: Optional[str] = None, - value: Optional[List["Workspace"]] = None, - **kwargs - ): + self, *, next_link: Optional[str] = None, value: Optional[List["_models.Workspace"]] = None, **kwargs: Any + ) -> None: """ :keyword next_link: The link to the next page constructed using the continuationToken. If null, there are no additional pages. @@ -33434,12 +33280,12 @@ def __init__( nextLink field should be used to request the next list of machine learning workspaces. :paramtype value: list[~azure.mgmt.machinelearningservices.models.Workspace] """ - super(WorkspaceListResult, self).__init__(**kwargs) + super().__init__(**kwargs) self.next_link = next_link self.value = value -class WorkspacePrivateEndpointResource(msrest.serialization.Model): +class WorkspacePrivateEndpointResource(_serialization.Model): """The Private Endpoint resource. Variables are only populated by the server, and will be ignored when sending a request. @@ -33452,27 +33298,23 @@ class WorkspacePrivateEndpointResource(msrest.serialization.Model): """ _validation = { - 'id': {'readonly': True}, - 'subnet_arm_id': {'readonly': True}, + "id": {"readonly": True}, + "subnet_arm_id": {"readonly": True}, } _attribute_map = { - 'id': {'key': 'id', 'type': 'str'}, - 'subnet_arm_id': {'key': 'subnetArmId', 'type': 'str'}, + "id": {"key": "id", "type": "str"}, + "subnet_arm_id": {"key": "subnetArmId", "type": "str"}, } - def __init__( - self, - **kwargs - ): - """ - """ - super(WorkspacePrivateEndpointResource, self).__init__(**kwargs) + def __init__(self, **kwargs: Any) -> None: + """ """ + super().__init__(**kwargs) self.id = None self.subnet_arm_id = None -class WorkspaceUpdateParameters(msrest.serialization.Model): +class WorkspaceUpdateParameters(_serialization.Model): # pylint: disable=too-many-instance-attributes """The parameters for updating a machine learning workspace. :ivar identity: Managed service identity (system assigned and/or user assigned identities). @@ -33480,7 +33322,7 @@ class WorkspaceUpdateParameters(msrest.serialization.Model): :ivar sku: Optional. This field is required to be implemented by the RP because AML is supporting more than one tier. :vartype sku: ~azure.mgmt.machinelearningservices.models.Sku - :ivar tags: A set of tags. The resource tags for the machine learning workspace. + :ivar tags: The resource tags for the machine learning workspace. :vartype tags: dict[str, str] :ivar application_insights: ARM id of the application insights associated with this workspace. :vartype application_insights: str @@ -33504,10 +33346,13 @@ class WorkspaceUpdateParameters(msrest.serialization.Model): :ivar primary_user_assigned_identity: The user assigned identity resource id that represents the workspace identity. :vartype primary_user_assigned_identity: str - :ivar public_network_access: Whether requests from Public Network are allowed. Possible values - include: "Enabled", "Disabled". + :ivar public_network_access: Whether requests from Public Network are allowed. Known values + are: "Enabled" and "Disabled". 
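# A small sketch of the read-only pattern used by WorkspacePrivateEndpointResource
# above: the constructor takes no field keywords, and id / subnet_arm_id stay None
# until the service populates them. Import path assumed as elsewhere in this patch.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

pe_resource = _models.WorkspacePrivateEndpointResource()
assert pe_resource.id is None and pe_resource.subnet_arm_id is None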
:vartype public_network_access: str or ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType + :ivar serverless_compute_settings: Settings for serverless compute created in the workspace. + :vartype serverless_compute_settings: + ~azure.mgmt.machinelearningservices.models.ServerlessComputeSettings :ivar service_managed_resources_settings: The service managed resource settings. :vartype service_managed_resources_settings: ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings @@ -33519,54 +33364,62 @@ class WorkspaceUpdateParameters(msrest.serialization.Model): """ _attribute_map = { - 'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'}, - 'sku': {'key': 'sku', 'type': 'Sku'}, - 'tags': {'key': 'tags', 'type': '{str}'}, - 'application_insights': {'key': 'properties.applicationInsights', 'type': 'str'}, - 'container_registry': {'key': 'properties.containerRegistry', 'type': 'str'}, - 'description': {'key': 'properties.description', 'type': 'str'}, - 'enable_data_isolation': {'key': 'properties.enableDataIsolation', 'type': 'bool'}, - 'encryption': {'key': 'properties.encryption', 'type': 'EncryptionUpdateProperties'}, - 'feature_store_settings': {'key': 'properties.featureStoreSettings', 'type': 'FeatureStoreSettings'}, - 'friendly_name': {'key': 'properties.friendlyName', 'type': 'str'}, - 'image_build_compute': {'key': 'properties.imageBuildCompute', 'type': 'str'}, - 'managed_network': {'key': 'properties.managedNetwork', 'type': 'ManagedNetworkSettings'}, - 'primary_user_assigned_identity': {'key': 'properties.primaryUserAssignedIdentity', 'type': 'str'}, - 'public_network_access': {'key': 'properties.publicNetworkAccess', 'type': 'str'}, - 'service_managed_resources_settings': {'key': 'properties.serviceManagedResourcesSettings', 'type': 'ServiceManagedResourcesSettings'}, - 'soft_delete_retention_in_days': {'key': 'properties.softDeleteRetentionInDays', 'type': 'int'}, - 'v1_legacy_mode': {'key': 'properties.v1LegacyMode', 'type': 'bool'}, + "identity": {"key": "identity", "type": "ManagedServiceIdentity"}, + "sku": {"key": "sku", "type": "Sku"}, + "tags": {"key": "tags", "type": "{str}"}, + "application_insights": {"key": "properties.applicationInsights", "type": "str"}, + "container_registry": {"key": "properties.containerRegistry", "type": "str"}, + "description": {"key": "properties.description", "type": "str"}, + "enable_data_isolation": {"key": "properties.enableDataIsolation", "type": "bool"}, + "encryption": {"key": "properties.encryption", "type": "EncryptionUpdateProperties"}, + "feature_store_settings": {"key": "properties.featureStoreSettings", "type": "FeatureStoreSettings"}, + "friendly_name": {"key": "properties.friendlyName", "type": "str"}, + "image_build_compute": {"key": "properties.imageBuildCompute", "type": "str"}, + "managed_network": {"key": "properties.managedNetwork", "type": "ManagedNetworkSettings"}, + "primary_user_assigned_identity": {"key": "properties.primaryUserAssignedIdentity", "type": "str"}, + "public_network_access": {"key": "properties.publicNetworkAccess", "type": "str"}, + "serverless_compute_settings": { + "key": "properties.serverlessComputeSettings", + "type": "ServerlessComputeSettings", + }, + "service_managed_resources_settings": { + "key": "properties.serviceManagedResourcesSettings", + "type": "ServiceManagedResourcesSettings", + }, + "soft_delete_retention_in_days": {"key": "properties.softDeleteRetentionInDays", "type": "int"}, + "v1_legacy_mode": {"key": "properties.v1LegacyMode", "type": 
"bool"}, } def __init__( self, *, - identity: Optional["ManagedServiceIdentity"] = None, - sku: Optional["Sku"] = None, + identity: Optional["_models.ManagedServiceIdentity"] = None, + sku: Optional["_models.Sku"] = None, tags: Optional[Dict[str, str]] = None, application_insights: Optional[str] = None, container_registry: Optional[str] = None, description: Optional[str] = None, enable_data_isolation: Optional[bool] = None, - encryption: Optional["EncryptionUpdateProperties"] = None, - feature_store_settings: Optional["FeatureStoreSettings"] = None, + encryption: Optional["_models.EncryptionUpdateProperties"] = None, + feature_store_settings: Optional["_models.FeatureStoreSettings"] = None, friendly_name: Optional[str] = None, image_build_compute: Optional[str] = None, - managed_network: Optional["ManagedNetworkSettings"] = None, + managed_network: Optional["_models.ManagedNetworkSettings"] = None, primary_user_assigned_identity: Optional[str] = None, - public_network_access: Optional[Union[str, "PublicNetworkAccessType"]] = None, - service_managed_resources_settings: Optional["ServiceManagedResourcesSettings"] = None, + public_network_access: Optional[Union[str, "_models.PublicNetworkAccessType"]] = None, + serverless_compute_settings: Optional["_models.ServerlessComputeSettings"] = None, + service_managed_resources_settings: Optional["_models.ServiceManagedResourcesSettings"] = None, soft_delete_retention_in_days: Optional[int] = None, v1_legacy_mode: Optional[bool] = None, - **kwargs - ): + **kwargs: Any + ) -> None: """ :keyword identity: Managed service identity (system assigned and/or user assigned identities). :paramtype identity: ~azure.mgmt.machinelearningservices.models.ManagedServiceIdentity :keyword sku: Optional. This field is required to be implemented by the RP because AML is supporting more than one tier. :paramtype sku: ~azure.mgmt.machinelearningservices.models.Sku - :keyword tags: A set of tags. The resource tags for the machine learning workspace. + :keyword tags: The resource tags for the machine learning workspace. :paramtype tags: dict[str, str] :keyword application_insights: ARM id of the application insights associated with this workspace. @@ -33591,10 +33444,13 @@ def __init__( :keyword primary_user_assigned_identity: The user assigned identity resource id that represents the workspace identity. :paramtype primary_user_assigned_identity: str - :keyword public_network_access: Whether requests from Public Network are allowed. Possible - values include: "Enabled", "Disabled". + :keyword public_network_access: Whether requests from Public Network are allowed. Known values + are: "Enabled" and "Disabled". :paramtype public_network_access: str or ~azure.mgmt.machinelearningservices.models.PublicNetworkAccessType + :keyword serverless_compute_settings: Settings for serverless compute created in the workspace. + :paramtype serverless_compute_settings: + ~azure.mgmt.machinelearningservices.models.ServerlessComputeSettings :keyword service_managed_resources_settings: The service managed resource settings. :paramtype service_managed_resources_settings: ~azure.mgmt.machinelearningservices.models.ServiceManagedResourcesSettings @@ -33605,7 +33461,7 @@ def __init__( by the v2 API. 
:paramtype v1_legacy_mode: bool """ - super(WorkspaceUpdateParameters, self).__init__(**kwargs) + super().__init__(**kwargs) self.identity = identity self.sku = sku self.tags = tags @@ -33620,6 +33476,7 @@ def __init__( self.managed_network = managed_network self.primary_user_assigned_identity = primary_user_assigned_identity self.public_network_access = public_network_access + self.serverless_compute_settings = serverless_compute_settings self.service_managed_resources_settings = service_managed_resources_settings self.soft_delete_retention_in_days = soft_delete_retention_in_days self.v1_legacy_mode = v1_legacy_mode diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_patch.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/models/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/__init__.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/__init__.py index 64d0c00d4f30..d37dfef71298 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/__init__.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/__init__.py @@ -6,100 +6,105 @@ # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
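# An illustrative (invented) customization, showing how the new _patch.py hook
# above is meant to be used: hand-written helpers live in _patch.py, are listed in
# its __all__, and the generated __init__ re-exports them and calls patch_sdk().
from typing import List


def list_connection_auth_types() -> List[str]:
    """Hypothetical convenience helper layered on top of the generated models."""
    return ["ManagedIdentity", "PAT", "ServicePrincipal", "SAS", "UsernamePassword"]


__all__: List[str] = ["list_connection_auth_types"]


def patch_sdk():
    """Keep the hook in place; the generated __init__ always calls it."""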
# -------------------------------------------------------------------------- -from ._usages_operations import UsagesOperations -from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations -from ._quotas_operations import QuotasOperations -from ._compute_operations import ComputeOperations -from ._registries_operations import RegistriesOperations -from ._workspace_features_operations import WorkspaceFeaturesOperations -from ._registry_code_containers_operations import RegistryCodeContainersOperations -from ._registry_code_versions_operations import RegistryCodeVersionsOperations -from ._registry_component_containers_operations import RegistryComponentContainersOperations -from ._registry_component_versions_operations import RegistryComponentVersionsOperations -from ._registry_data_containers_operations import RegistryDataContainersOperations -from ._registry_data_versions_operations import RegistryDataVersionsOperations -from ._registry_environment_containers_operations import RegistryEnvironmentContainersOperations -from ._registry_environment_versions_operations import RegistryEnvironmentVersionsOperations -from ._registry_model_containers_operations import RegistryModelContainersOperations -from ._registry_model_versions_operations import RegistryModelVersionsOperations -from ._batch_endpoints_operations import BatchEndpointsOperations from ._batch_deployments_operations import BatchDeploymentsOperations +from ._batch_endpoints_operations import BatchEndpointsOperations from ._code_containers_operations import CodeContainersOperations from ._code_versions_operations import CodeVersionsOperations from ._component_containers_operations import ComponentContainersOperations from ._component_versions_operations import ComponentVersionsOperations +from ._compute_operations import ComputeOperations from ._data_containers_operations import DataContainersOperations from ._data_versions_operations import DataVersionsOperations from ._datastores_operations import DatastoresOperations from ._environment_containers_operations import EnvironmentContainersOperations from ._environment_versions_operations import EnvironmentVersionsOperations -from ._featureset_containers_operations import FeaturesetContainersOperations from ._features_operations import FeaturesOperations +from ._featureset_containers_operations import FeaturesetContainersOperations from ._featureset_versions_operations import FeaturesetVersionsOperations from ._featurestore_entity_containers_operations import FeaturestoreEntityContainersOperations from ._featurestore_entity_versions_operations import FeaturestoreEntityVersionsOperations from ._jobs_operations import JobsOperations from ._labeling_jobs_operations import LabelingJobsOperations +from ._managed_network_provisions_operations import ManagedNetworkProvisionsOperations +from ._managed_network_settings_rule_operations import ManagedNetworkSettingsRuleOperations from ._model_containers_operations import ModelContainersOperations from ._model_versions_operations import ModelVersionsOperations -from ._online_endpoints_operations import OnlineEndpointsOperations from ._online_deployments_operations import OnlineDeploymentsOperations -from ._schedules_operations import SchedulesOperations -from ._serverless_endpoints_operations import ServerlessEndpointsOperations +from ._online_endpoints_operations import OnlineEndpointsOperations from ._operations import Operations -from ._workspaces_operations import WorkspacesOperations -from ._workspace_connections_operations 
import WorkspaceConnectionsOperations -from ._managed_network_settings_rule_operations import ManagedNetworkSettingsRuleOperations +from ._patch import * # pylint: disable=unused-wildcard-import +from ._patch import __all__ as _patch_all +from ._patch import patch_sdk as _patch_sdk from ._private_endpoint_connections_operations import PrivateEndpointConnectionsOperations from ._private_link_resources_operations import PrivateLinkResourcesOperations -from ._managed_network_provisions_operations import ManagedNetworkProvisionsOperations +from ._quotas_operations import QuotasOperations +from ._registries_operations import RegistriesOperations +from ._registry_code_containers_operations import RegistryCodeContainersOperations +from ._registry_code_versions_operations import RegistryCodeVersionsOperations +from ._registry_component_containers_operations import RegistryComponentContainersOperations +from ._registry_component_versions_operations import RegistryComponentVersionsOperations +from ._registry_data_containers_operations import RegistryDataContainersOperations +from ._registry_data_versions_operations import RegistryDataVersionsOperations +from ._registry_environment_containers_operations import RegistryEnvironmentContainersOperations +from ._registry_environment_versions_operations import RegistryEnvironmentVersionsOperations +from ._registry_model_containers_operations import RegistryModelContainersOperations +from ._registry_model_versions_operations import RegistryModelVersionsOperations +from ._schedules_operations import SchedulesOperations +from ._serverless_endpoints_operations import ServerlessEndpointsOperations +from ._usages_operations import UsagesOperations +from ._virtual_machine_sizes_operations import VirtualMachineSizesOperations +from ._workspace_connections_operations import WorkspaceConnectionsOperations +from ._workspace_features_operations import WorkspaceFeaturesOperations +from ._workspaces_operations import WorkspacesOperations __all__ = [ - 'UsagesOperations', - 'VirtualMachineSizesOperations', - 'QuotasOperations', - 'ComputeOperations', - 'RegistriesOperations', - 'WorkspaceFeaturesOperations', - 'RegistryCodeContainersOperations', - 'RegistryCodeVersionsOperations', - 'RegistryComponentContainersOperations', - 'RegistryComponentVersionsOperations', - 'RegistryDataContainersOperations', - 'RegistryDataVersionsOperations', - 'RegistryEnvironmentContainersOperations', - 'RegistryEnvironmentVersionsOperations', - 'RegistryModelContainersOperations', - 'RegistryModelVersionsOperations', - 'BatchEndpointsOperations', - 'BatchDeploymentsOperations', - 'CodeContainersOperations', - 'CodeVersionsOperations', - 'ComponentContainersOperations', - 'ComponentVersionsOperations', - 'DataContainersOperations', - 'DataVersionsOperations', - 'DatastoresOperations', - 'EnvironmentContainersOperations', - 'EnvironmentVersionsOperations', - 'FeaturesetContainersOperations', - 'FeaturesOperations', - 'FeaturesetVersionsOperations', - 'FeaturestoreEntityContainersOperations', - 'FeaturestoreEntityVersionsOperations', - 'JobsOperations', - 'LabelingJobsOperations', - 'ModelContainersOperations', - 'ModelVersionsOperations', - 'OnlineEndpointsOperations', - 'OnlineDeploymentsOperations', - 'SchedulesOperations', - 'ServerlessEndpointsOperations', - 'Operations', - 'WorkspacesOperations', - 'WorkspaceConnectionsOperations', - 'ManagedNetworkSettingsRuleOperations', - 'PrivateEndpointConnectionsOperations', - 'PrivateLinkResourcesOperations', - 'ManagedNetworkProvisionsOperations', 
+ "UsagesOperations", + "VirtualMachineSizesOperations", + "QuotasOperations", + "ComputeOperations", + "RegistriesOperations", + "WorkspaceFeaturesOperations", + "RegistryCodeContainersOperations", + "RegistryCodeVersionsOperations", + "RegistryComponentContainersOperations", + "RegistryComponentVersionsOperations", + "RegistryDataContainersOperations", + "RegistryDataVersionsOperations", + "RegistryEnvironmentContainersOperations", + "RegistryEnvironmentVersionsOperations", + "RegistryModelContainersOperations", + "RegistryModelVersionsOperations", + "BatchEndpointsOperations", + "BatchDeploymentsOperations", + "CodeContainersOperations", + "CodeVersionsOperations", + "ComponentContainersOperations", + "ComponentVersionsOperations", + "DataContainersOperations", + "DataVersionsOperations", + "DatastoresOperations", + "EnvironmentContainersOperations", + "EnvironmentVersionsOperations", + "FeaturesetContainersOperations", + "FeaturesOperations", + "FeaturesetVersionsOperations", + "FeaturestoreEntityContainersOperations", + "FeaturestoreEntityVersionsOperations", + "JobsOperations", + "LabelingJobsOperations", + "ModelContainersOperations", + "ModelVersionsOperations", + "OnlineEndpointsOperations", + "OnlineDeploymentsOperations", + "SchedulesOperations", + "ServerlessEndpointsOperations", + "Operations", + "WorkspacesOperations", + "WorkspaceConnectionsOperations", + "ManagedNetworkSettingsRuleOperations", + "PrivateEndpointConnectionsOperations", + "PrivateLinkResourcesOperations", + "ManagedNetworkProvisionsOperations", ] +__all__.extend([p for p in _patch_all if p not in __all__]) +_patch_sdk() diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_batch_deployments_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_batch_deployments_operations.py index 03dd837b325a..b593107c4603 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_batch_deployments_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_batch_deployments_operations.py @@ -6,344 +6,369 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - order_by = kwargs.pop('order_by', None) # type: Optional[str] - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if order_by is not None: - _query_parameters['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + _params["$orderBy"] = 
_SERIALIZER.query("order_by", order_by, "str") if top is not None: - _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int') + _params["$top"] = _SERIALIZER.query("top", top, "int") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), - "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), - "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - 
**kwargs - ) - - -def build_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url( + "endpoint_name", endpoint_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "deploymentName": _SERIALIZER.url( + "deployment_name", deployment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 
'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PATCH", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url( + "endpoint_name", endpoint_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "deploymentName": _SERIALIZER.url( + "deployment_name", deployment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", 
api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class BatchDeploymentsOperations(object): - """BatchDeploymentsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class BatchDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`batch_deployments` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - order_by=None, # type: Optional[str] - top=None, # type: Optional[int] - skip=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.BatchDeploymentTrackedResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.BatchDeployment"]: """Lists Batch inference deployments in the workspace. Lists Batch inference deployments in the workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Endpoint name. + :param endpoint_name: Endpoint name. Required. :type endpoint_name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Top of list. + :param top: Top of list. Default value is None. 
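# A hedged call sketch for the regenerated list() operation documented here. The
# client class name follows the rename in this patch; its constructor signature
# and the package-level export are assumptions.
from azure.identity import DefaultAzureCredential
from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices

client = AzureMachineLearningServices(DefaultAzureCredential(), "<subscription-id>")
# list() returns ItemPaged[BatchDeployment]; continuation now re-parses nextLink and
# re-applies the client's api-version on every page, as prepare_request shows below.
for deployment in client.batch_deployments.list(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    endpoint_name="my-batch-endpoint",
    top=10,
):
    print(deployment.name)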
:type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either BatchDeploymentTrackedResourceArmPaginatedResult - or the result of cls(response) + :return: An iterator like instance of either BatchDeployment or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.BatchDeploymentTrackedResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.BatchDeploymentTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchDeploymentTrackedResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - endpoint_name=endpoint_name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -354,16 +379,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("BatchDeploymentTrackedResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: 
disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -374,87 +398,86 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - 
response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> LROPoller[None]: """Delete Batch Inference deployment (asynchronous). Delete Batch Inference deployment (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Endpoint name. + :param endpoint_name: Endpoint name. Required. :type endpoint_name: str - :param deployment_name: Inference deployment identifier. + :param deployment_name: Inference deployment identifier. Required. :type deployment_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -466,100 +489,110 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
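# A hedged sketch of the long-running delete documented here; the client
# construction repeats the assumptions from the list() sketch above.
from azure.identity import DefaultAzureCredential
from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices

client = AzureMachineLearningServices(DefaultAzureCredential(), "<subscription-id>")
poller = client.batch_deployments.begin_delete(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    endpoint_name="my-batch-endpoint",
    deployment_name="my-deployment",
    polling_interval=10,  # seconds between polls when no Retry-After header is returned
)
poller.result()  # blocks until the location-based ARM polling reports completion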
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.BatchDeployment" + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> _models.BatchDeployment: """Gets a batch inference deployment by id. 
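A similarly hedged sketch for the read path: `get` issues a single GET and returns the deserialized BatchDeployment model. The `client` object and the `batch_deployments` attribute are assumed names, not part of this patch.

# Minimal sketch (assumed names): fetch one batch deployment.
def show_deployment(client, resource_group_name, workspace_name, endpoint_name, deployment_name):
    deployment = client.batch_deployments.get(
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        endpoint_name=endpoint_name,
        deployment_name=deployment_name,
    )
    # The response body is deserialized into the BatchDeployment model.
    print(deployment.name)
    return deployment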
Gets a batch inference deployment by id. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Endpoint name. + :param endpoint_name: Endpoint name. Required. :type endpoint_name: str - :param deployment_name: The identifier for the Batch deployments. + :param deployment_name: The identifier for the Batch deployments. Required. :type deployment_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: BatchDeployment, or the result of cls(response) + :return: BatchDeployment or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.BatchDeployment - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchDeployment"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.BatchDeployment] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -567,107 +600,221 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('BatchDeployment', pipeline_response) + deserialized = self._deserialize("BatchDeployment", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}" + } def _update_initial( 
self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - body, # type: "_models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties" - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.BatchDeployment"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.BatchDeployment"]] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, IO], + **kwargs: Any + ) -> Optional[_models.BatchDeployment]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.BatchDeployment]] = kwargs.pop("cls", None) - request = build_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._update_initial.metadata['url'], + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('BatchDeployment', pipeline_response) + deserialized = self._deserialize("BatchDeployment", pipeline_response) if response.status_code == 202: - 
response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}" + } + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchDeployment]: + """Update a batch inference deployment (asynchronous). + + Update a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
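To make the two accepted body shapes concrete, the sketch below drives `begin_update` through its IO overload, sending raw JSON bytes rather than the partial-resource model. The `client` object, the `batch_deployments` attribute, and the exact PATCH payload are assumptions used only for illustration.

import json

# Minimal sketch (assumed names): PATCH a deployment via the IO overload of
# begin_update; bytes bodies are passed through without model serialization.
def update_deployment_tags(client, resource_group_name, workspace_name, endpoint_name, deployment_name):
    body = json.dumps({"tags": {"stage": "test"}}).encode("utf-8")
    poller = client.batch_deployments.begin_update(
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        endpoint_name=endpoint_name,
        deployment_name=deployment_name,
        body=body,
        content_type="application/json",  # optional; application/json is also the default
    )
    # The poller resolves to the updated BatchDeployment model.
    return poller.result()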
+ :return: An instance of LROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchDeployment]: + """Update a batch inference deployment (asynchronous). + + Update a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_update( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - body, # type: "_models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.BatchDeployment"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties, IO], + **kwargs: Any + ) -> LROPoller[_models.BatchDeployment]: """Update a batch inference deployment (asynchronous). Update a batch inference deployment (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param deployment_name: The identifier for the Batch inference deployment. + :param deployment_name: The identifier for the Batch inference deployment. Required. 
:type deployment_name: str - :param body: Batch inference deployment definition object. + :param body: Batch inference deployment definition object. Is either a + PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties type or a IO type. Required. :type body: ~azure.mgmt.machinelearningservices.models.PartialBatchDeploymentPartialMinimalTrackedResourceWithProperties + or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -680,17 +827,17 @@ def begin_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchDeployment"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BatchDeployment] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._update_initial( resource_group_name=resource_group_name, @@ -700,122 +847,241 @@ def begin_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('BatchDeployment', pipeline_response) + deserialized = self._deserialize("BatchDeployment", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return 
LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - body, # type: "_models.BatchDeployment" - **kwargs # type: Any - ): - # type: (...) -> "_models.BatchDeployment" - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchDeployment"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.BatchDeployment, IO], + **kwargs: Any + ) -> _models.BatchDeployment: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'BatchDeployment') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BatchDeployment] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "BatchDeployment") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, 
pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('BatchDeployment', pipeline_response) + deserialized = self._deserialize("BatchDeployment", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('BatchDeployment', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("BatchDeployment", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}" + } - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.BatchDeployment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchDeployment]: + """Creates/updates a batch inference deployment (asynchronous). + Creates/updates a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchDeployment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
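The sketch below exercises `begin_create_or_update` with a model body by round-tripping a deployment fetched through `get`. The `client` object, the `batch_deployments` attribute, and the clone scenario are assumptions chosen so the example relies only on operations defined in this file.

# Minimal sketch (assumed names): copy an existing deployment definition to a
# new deployment name. Illustrative only; real callers would normally build or
# adjust the BatchDeployment model before submitting it.
def clone_deployment(client, resource_group_name, workspace_name, endpoint_name, source_name, target_name):
    source = client.batch_deployments.get(
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        endpoint_name=endpoint_name,
        deployment_name=source_name,
    )
    poller = client.batch_deployments.begin_create_or_update(
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        endpoint_name=endpoint_name,
        deployment_name=target_name,
        body=source,  # a BatchDeployment model; raw JSON bytes or a stream would also be accepted
    )
    # The poller resolves once the new deployment reaches a terminal state.
    return poller.result()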
+ :return: An instance of LROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchDeployment]: + """Creates/updates a batch inference deployment (asynchronous). + + Creates/updates a batch inference deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The identifier for the Batch inference deployment. Required. + :type deployment_name: str + :param body: Batch inference deployment definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - body, # type: "_models.BatchDeployment" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.BatchDeployment"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.BatchDeployment, IO], + **kwargs: Any + ) -> LROPoller[_models.BatchDeployment]: """Creates/updates a batch inference deployment (asynchronous). Creates/updates a batch inference deployment (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param deployment_name: The identifier for the Batch inference deployment. + :param deployment_name: The identifier for the Batch inference deployment. Required. :type deployment_name: str - :param body: Batch inference deployment definition object. 
- :type body: ~azure.mgmt.machinelearningservices.models.BatchDeployment + :param body: Batch inference deployment definition object. Is either a BatchDeployment type or + a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchDeployment or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -828,17 +1094,17 @@ def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchDeployment] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchDeployment"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BatchDeployment] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -848,29 +1114,36 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('BatchDeployment', pipeline_response) + deserialized = self._deserialize("BatchDeployment", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, 
polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/deployments/{deploymentName}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_batch_endpoints_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_batch_endpoints_operations.py index 73c6f3dbdad8..6cf425c63366 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_batch_endpoints_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_batch_endpoints_operations.py @@ -6,360 +6,363 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - count = kwargs.pop('count', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if count is not None: - _query_parameters['count'] = _SERIALIZER.query("count", count, 'int') + _params["count"] = _SERIALIZER.query("count", count, "int") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url( + "endpoint_name", endpoint_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PATCH", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url( + "endpoint_name", endpoint_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_list_keys_request( - subscription_id, # type: 
str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class BatchEndpointsOperations(object): - """BatchEndpointsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class BatchEndpointsOperations: + """ + .. 
warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`batch_endpoints` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - count=None, # type: Optional[int] - skip=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.BatchEndpointTrackedResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.BatchEndpoint"]: """Lists Batch inference endpoint in the workspace. Lists Batch inference endpoint in the workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param count: Number of endpoints to be retrieved in a page of results. + :param count: Number of endpoints to be retrieved in a page of results. Default value is None. :type count: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. 
:type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either BatchEndpointTrackedResourceArmPaginatedResult or - the result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.BatchEndpointTrackedResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either BatchEndpoint or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.BatchEndpointTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchEndpointTrackedResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, count=count, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - count=count, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -370,16 +373,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("BatchEndpointTrackedResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response @@ -390,82 +392,83 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", 
response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> LROPoller[None]: """Delete Batch Inference Endpoint (asynchronous). Delete Batch Inference Endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference Endpoint name. + :param endpoint_name: Inference Endpoint name. Required. :type endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -477,95 +480,106 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, 
**kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.BatchEndpoint" + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.BatchEndpoint: """Gets a batch inference endpoint by name. Gets a batch inference endpoint by name. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Name for the Batch Endpoint. + :param endpoint_name: Name for the Batch Endpoint. Required. 
:type endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: BatchEndpoint, or the result of cls(response) + :return: BatchEndpoint or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.BatchEndpoint - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchEndpoint"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.BatchEndpoint] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -573,102 +587,207 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('BatchEndpoint', pipeline_response) + deserialized = self._deserialize("BatchEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}" + } def _update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - body, # type: "_models.PartialMinimalTrackedResourceWithIdentity" - **kwargs # type: Any - ): - # type: (...) 
-> Optional["_models.BatchEndpoint"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.BatchEndpoint"]] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], + **kwargs: Any + ) -> Optional[_models.BatchEndpoint]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PartialMinimalTrackedResourceWithIdentity') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.BatchEndpoint]] = kwargs.pop("cls", None) - request = build_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithIdentity") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._update_initial.metadata['url'], + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('BatchEndpoint', pipeline_response) + deserialized = self._deserialize("BatchEndpoint", pipeline_response) if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + 
response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}" + } + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchEndpoint]: + """Update a batch inference endpoint (asynchronous). + + Update a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Mutable batch inference endpoint definition object. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchEndpoint]: + """Update a batch inference endpoint (asynchronous). + Update a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Mutable batch inference endpoint definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_update( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - body, # type: "_models.PartialMinimalTrackedResourceWithIdentity" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.BatchEndpoint"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], + **kwargs: Any + ) -> LROPoller[_models.BatchEndpoint]: """Update a batch inference endpoint (asynchronous). Update a batch inference endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Name for the Batch inference endpoint. + :param endpoint_name: Name for the Batch inference endpoint. Required. :type endpoint_name: str - :param body: Mutable batch inference endpoint definition object. + :param body: Mutable batch inference endpoint definition object. Is either a + PartialMinimalTrackedResourceWithIdentity type or a IO type. Required. :type body: - ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -680,17 +799,17 @@ def begin_update( :return: An instance of LROPoller that returns either BatchEndpoint or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchEndpoint"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BatchEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._update_initial( resource_group_name=resource_group_name, @@ -699,117 +818,228 @@ def begin_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('BatchEndpoint', pipeline_response) + deserialized = self._deserialize("BatchEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - body, # type: "_models.BatchEndpoint" - **kwargs # type: 
Any - ): - # type: (...) -> "_models.BatchEndpoint" - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchEndpoint"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.BatchEndpoint, IO], + **kwargs: Any + ) -> _models.BatchEndpoint: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'BatchEndpoint') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BatchEndpoint] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "BatchEndpoint") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('BatchEndpoint', pipeline_response) + deserialized = self._deserialize("BatchEndpoint", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('BatchEndpoint', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", 
response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("BatchEndpoint", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.BatchEndpoint, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchEndpoint]: + """Creates a batch inference endpoint (asynchronous). + + Creates a batch inference endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Batch inference endpoint definition object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchEndpoint + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.BatchEndpoint]: + """Creates a batch inference endpoint (asynchronous). - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + Creates a batch inference endpoint (asynchronous). + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Name for the Batch inference endpoint. Required. + :type endpoint_name: str + :param body: Batch inference endpoint definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
+ Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either BatchEndpoint or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - body, # type: "_models.BatchEndpoint" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.BatchEndpoint"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.BatchEndpoint, IO], + **kwargs: Any + ) -> LROPoller[_models.BatchEndpoint]: """Creates a batch inference endpoint (asynchronous). Creates a batch inference endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Name for the Batch inference endpoint. + :param endpoint_name: Name for the Batch inference endpoint. Required. :type endpoint_name: str - :param body: Batch inference endpoint definition object. - :type body: ~azure.mgmt.machinelearningservices.models.BatchEndpoint + :param body: Batch inference endpoint definition object. Is either a BatchEndpoint type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.BatchEndpoint or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -821,17 +1051,17 @@ def begin_create_or_update( :return: An instance of LROPoller that returns either BatchEndpoint or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.BatchEndpoint] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.BatchEndpoint"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.BatchEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -840,82 +1070,92 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('BatchEndpoint', pipeline_response) + deserialized = self._deserialize("BatchEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}" + } @distributed_trace def list_keys( - 
self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.EndpointAuthKeys" + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.EndpointAuthKeys: """Lists batch Inference Endpoint keys. Lists batch Inference Endpoint keys. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference Endpoint name. + :param endpoint_name: Inference Endpoint name. Required. :type endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EndpointAuthKeys, or the result of cls(response) + :return: EndpointAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EndpointAuthKeys"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EndpointAuthKeys] = kwargs.pop("cls", None) - request = build_list_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_keys.metadata['url'], + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -923,12 +1163,13 @@ def list_keys( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EndpointAuthKeys', pipeline_response) + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys"} # type: ignore - + list_keys.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/batchEndpoints/{endpointName}/listkeys" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_code_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_code_containers_operations.py index 4cb7affd9b71..d5198cbc2b93 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_code_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_code_containers_operations.py @@ -6,269 +6,266 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class CodeContainersOperations(object): - """CodeContainersOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class CodeContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`code_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name, # type: str - workspace_name, # type: str - skip=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.CodeContainerResourceArmPaginatedResult"] + self, resource_group_name: str, workspace_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.CodeContainer"]: """List containers. List containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. 
:type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either CodeContainerResourceArmPaginatedResult or the - result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either CodeContainer or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -279,16 +276,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("CodeContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -299,61 +295,64 @@ def 
get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -364,58 +363,60 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore - + delete.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}" + } @distributed_trace - def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.CodeContainer" + def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.CodeContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: CodeContainer, or the result of cls(response) + :return: CodeContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -423,72 +424,158 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('CodeContainer', pipeline_response) + deserialized = self._deserialize("CodeContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore + get.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}" + } + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.CodeContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.CodeContainer" - **kwargs # type: Any - ): - # type: (...) -> "_models.CodeContainer" + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.CodeContainer, IO], + **kwargs: Any + ) -> _models.CodeContainer: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer + :param body: Container entity to create or update. 
Is either a CodeContainer type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: CodeContainer, or the result of cls(response) + :return: CodeContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CodeContainer] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'CodeContainer') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeContainer") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -497,15 +584,16 @@ def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('CodeContainer', pipeline_response) + deserialized = self._deserialize("CodeContainer", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('CodeContainer', pipeline_response) + deserialized = self._deserialize("CodeContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_code_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_code_versions_operations.py index a5a5782930fe..04757ceaf822 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_code_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_code_versions_operations.py @@ -6,359 +6,353 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - order_by = kwargs.pop('order_by', None) # type: Optional[str] - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[str] - hash = kwargs.pop('hash', None) # type: Optional[str] - hash_version = kwargs.pop('hash_version', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + hash: Optional[str] = None, + hash_version: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if order_by is not None: - _query_parameters['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") if top is not None: - _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int') + _params["$top"] = _SERIALIZER.query("top", top, "int") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if hash is not None: - _query_parameters['hash'] = _SERIALIZER.query("hash", hash, 'str') + _params["hash"] = _SERIALIZER.query("hash", hash, "str") if hash_version is not None: - _query_parameters['hashVersion'] = _SERIALIZER.query("hash_version", hash_version, 'str') + _params["hashVersion"] = _SERIALIZER.query("hash_version", hash_version, "str") # Construct headers - _header_parameters = 
kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # 
type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_get_start_pending_upload_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}/startPendingUpload") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}/startPendingUpload", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class CodeVersionsOperations(object): - """CodeVersionsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. 
- :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class CodeVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`code_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - order_by=None, # type: Optional[str] - top=None, # type: Optional[int] - skip=None, # type: Optional[str] - hash=None, # type: Optional[str] - hash_version=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.CodeVersionResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + hash: Optional[str] = None, + hash_version: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.CodeVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param hash: If specified, return CodeVersion assets with specified content hash value, - regardless of name. + regardless of name. Default value is None. :type hash: str - :param hash_version: Hash algorithm version when listing by hash. + :param hash_version: Hash algorithm version when listing by hash. Default value is None. 
:type hash_version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either CodeVersionResourceArmPaginatedResult or the - result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either CodeVersion or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, hash=hash, hash_version=hash_version, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - name=name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - hash=hash, - hash_version=hash_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -369,16 +363,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("CodeVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -389,65 +382,67 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 
204]: @@ -458,62 +453,65 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.CodeVersion" + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.CodeVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: CodeVersion, or the result of cls(response) + :return: CodeVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -521,76 +519,168 @@ def get( error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('CodeVersion', pipeline_response) + deserialized = self._deserialize("CodeVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}" + } + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.CodeVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.CodeVersion: + """Create or update version. + + Create or update version. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: CodeVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.CodeVersion" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.CodeVersion" + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.CodeVersion, IO], + **kwargs: Any + ) -> _models.CodeVersion: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion + :param body: Version entity to create or update. Is either a CodeVersion type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: CodeVersion, or the result of cls(response) + :return: CodeVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CodeVersion] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'CodeVersion') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeVersion") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + 
_stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -599,79 +689,171 @@ def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('CodeVersion', pipeline_response) + deserialized = self._deserialize("CodeVersion", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('CodeVersion', pipeline_response) + deserialized = self._deserialize("CodeVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized + return deserialized # type: ignore + + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}" + } - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}"} # type: ignore + @overload + def create_or_get_start_pending_upload( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.PendingUploadRequestDto, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a code asset to. + Generate a storage location and credential for the client to upload a code asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_get_start_pending_upload( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a code asset to. + + Generate a storage location and credential for the client to upload a code asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. 
This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def create_or_get_start_pending_upload( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.PendingUploadRequestDto" - **kwargs # type: Any - ): - # type: (...) -> "_models.PendingUploadResponseDto" + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.PendingUploadRequestDto, IO], + **kwargs: Any + ) -> _models.PendingUploadResponseDto: """Generate a storage location and credential for the client to upload a code asset to. Generate a storage location and credential for the client to upload a code asset to. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Pending upload request object. - :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto + :param body: Pending upload request object. Is either a PendingUploadRequestDto type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PendingUploadResponseDto, or the result of cls(response) + :return: PendingUploadResponseDto or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PendingUploadResponseDto"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PendingUploadResponseDto] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'PendingUploadRequestDto') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PendingUploadRequestDto") request = build_create_or_get_start_pending_upload_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_get_start_pending_upload.metadata['url'], + content=_content, + template_url=self.create_or_get_start_pending_upload.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -679,12 +861,13 @@ def create_or_get_start_pending_upload( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PendingUploadResponseDto', pipeline_response) + deserialized = self._deserialize("PendingUploadResponseDto", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_get_start_pending_upload.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}/startPendingUpload"} # type: ignore - + create_or_get_start_pending_upload.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/codes/{name}/versions/{version}/startPendingUpload" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_component_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_component_containers_operations.py index 889f4e26862b..9d49b7e36b59 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_component_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_component_containers_operations.py @@ -6,277 +6,284 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class ComponentContainersOperations(object): - """ComponentContainersOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class ComponentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`component_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - skip=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ComponentContainerResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.ComponentContainer"]: """List component containers. List component containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ComponentContainerResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either ComponentContainer or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComponentContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -287,16 +294,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ComponentContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -307,61 +313,64 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -372,58 +381,62 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ComponentContainer" + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.ComponentContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComponentContainer, or the result of cls(response) + :return: ComponentContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -431,72 +444,158 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ComponentContainer', pipeline_response) + deserialized = self._deserialize("ComponentContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}" + } + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ComponentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.ComponentContainer" - **kwargs # type: Any - ): - # type: (...) -> "_models.ComponentContainer" + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.ComponentContainer, IO], + **kwargs: Any + ) -> _models.ComponentContainer: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. 
:type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :param body: Container entity to create or update. Is either a ComponentContainer type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComponentContainer, or the result of cls(response) + :return: ComponentContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComponentContainer] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'ComponentContainer') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentContainer") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -505,15 +604,16 @@ def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('ComponentContainer', pipeline_response) + deserialized = self._deserialize("ComponentContainer", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('ComponentContainer', pipeline_response) + deserialized = self._deserialize("ComponentContainer", pipeline_response) if cls: - return 
cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_component_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_component_versions_operations.py index f7275f0022ed..ff9e59b702f4 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_component_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_component_versions_operations.py @@ -6,314 +6,314 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - order_by = kwargs.pop('order_by', None) # type: Optional[str] - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - stage = kwargs.pop('stage', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if order_by is not None: - _query_parameters['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") if top is not None: - _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int') + _params["$top"] = _SERIALIZER.query("top", top, "int") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") if stage is not None: - _query_parameters['stage'] = _SERIALIZER.query("stage", stage, 'str') + 
_params["stage"] = _SERIALIZER.query("stage", stage, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def 
build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class ComponentVersionsOperations(object): - """ComponentVersionsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. 
- :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class ComponentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`component_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - order_by=None, # type: Optional[str] - top=None, # type: Optional[int] - skip=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - stage=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ComponentVersionResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.ComponentVersion"]: """List component versions. List component versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Component name. + :param name: Component name. Required. :type name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param stage: Component stage. + :param stage: Component stage. Default value is None. 
:type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ComponentVersionResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either ComponentVersion or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComponentVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, list_view_type=list_view_type, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - name=name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - list_view_type=list_view_type, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -324,16 +324,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ComponentVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: 
PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -344,65 +343,67 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -413,62 +414,65 @@ def delete( # pylint: 
disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ComponentVersion" + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.ComponentVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComponentVersion, or the result of cls(response) + :return: ComponentVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -476,76 +480,168 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) - deserialized = self._deserialize('ComponentVersion', pipeline_response) + deserialized = self._deserialize("ComponentVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}" + } + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.ComponentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ComponentVersion: + """Create or update version. + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ComponentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.ComponentVersion" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.ComponentVersion" + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.ComponentVersion, IO], + **kwargs: Any + ) -> _models.ComponentVersion: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :param body: Version entity to create or update. Is either a ComponentVersion type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComponentVersion, or the result of cls(response) + :return: ComponentVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ComponentVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComponentVersion] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentVersion") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: 
PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -554,15 +650,16 @@ def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('ComponentVersion', pipeline_response) + deserialized = self._deserialize("ComponentVersion", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('ComponentVersion', pipeline_response) + deserialized = self._deserialize("ComponentVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/components/{name}/versions/{version}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_compute_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_compute_operations.py index 75a4b670e536..52679378baf2 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_compute_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_compute_operations.py @@ -6,673 +6,674 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, List, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, List, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, 
headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "computeName": _SERIALIZER.url("compute_name", compute_name, 'str', pattern=r'^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - accept = "application/json" +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "computeName": _SERIALIZER.url("compute_name", compute_name, 'str', pattern=r'^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) -def build_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - accept = "application/json" +def build_update_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "computeName": _SERIALIZER.url("compute_name", compute_name, 'str', pattern=r'^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PATCH", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - underlying_resource_action = kwargs.pop('underlying_resource_action') # type: Union[str, "_models.UnderlyingResourceAction"] +def build_delete_request( + resource_group_name: str, + workspace_name: str, + compute_name: str, + subscription_id: str, + *, + underlying_resource_action: Union[str, _models.UnderlyingResourceAction], + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") - accept = "application/json" # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "computeName": _SERIALIZER.url("compute_name", compute_name, 'str', pattern=r'^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - _query_parameters['underlyingResourceAction'] = _SERIALIZER.query("underlying_resource_action", underlying_resource_action, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + _params["underlyingResourceAction"] = _SERIALIZER.query( + "underlying_resource_action", underlying_resource_action, "str" + ) # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_update_custom_services_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any -): - 
# type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "computeName": _SERIALIZER.url("compute_name", compute_name, 'str', pattern=r'^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_list_nodes_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "computeName": _SERIALIZER.url("compute_name", compute_name, 'str', pattern=r'^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_list_keys_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "computeName": _SERIALIZER.url("compute_name", compute_name, 'str', pattern=r'^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_start_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str +def build_start_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") - accept = "application/json" # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "computeName": _SERIALIZER.url("compute_name", compute_name, 'str', pattern=r'^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_stop_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str +def build_stop_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") - accept = "application/json" # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "computeName": _SERIALIZER.url("compute_name", compute_name, 'str', pattern=r'^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -def build_restart_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str +def build_restart_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") - accept = "application/json" # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "computeName": _SERIALIZER.url("compute_name", compute_name, 'str', pattern=r'^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_update_idle_shutdown_setting_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "computeName": _SERIALIZER.url("compute_name", compute_name, 'str', pattern=r'^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_get_allowed_resize_sizes_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any -): - # type: (...) 
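# Hedged illustration of what the POST request builders above assemble: each one
# formats the ARM URL template, adds the api-version query parameter, and sets a
# JSON Accept header before returning an azure.core.rest.HttpRequest. The resource
# names below are hypothetical; the real builders also validate path values through
# _SERIALIZER (regex patterns, min/max lengths) before formatting.
from azure.core.rest import HttpRequest


def example_start_request() -> HttpRequest:
    subscription_id = "00000000-0000-0000-0000-000000000000"  # hypothetical
    resource_group_name = "my-rg"  # hypothetical
    workspace_name = "my-ws"  # hypothetical
    compute_name = "cpu-cluster"  # hypothetical

    url = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}"
        "/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"
        "/computes/{computeName}/start"
    ).format(
        subscriptionId=subscription_id,
        resourceGroupName=resource_group_name,
        workspaceName=workspace_name,
        computeName=compute_name,
    )
    # Body-less POST: only the api-version query parameter and Accept header,
    # mirroring build_start_request above.
    return HttpRequest(
        method="POST",
        url=url,
        params={"api-version": "2023-08-01-preview"},
        headers={"Accept": "application/json"},
    )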
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/getAllowedVmSizesForResize") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/getAllowedVmSizesForResize", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "computeName": _SERIALIZER.url("compute_name", compute_name, 'str', pattern=r'^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + +def build_resize_request( + resource_group_name: str, workspace_name: str, compute_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) -def build_resize_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") - accept = "application/json" # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "computeName": _SERIALIZER.url("compute_name", compute_name, 'str', pattern=r'^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "computeName": _SERIALIZER.url( + "compute_name", compute_name, "str", pattern=r"^[a-zA-Z](?![a-zA-Z0-9-]*-\d+$)[a-zA-Z0-9\-]{2,23}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) -# fmt: on -class ComputeOperations(object): # pylint: disable=too-many-public-methods - """ComputeOperations operations. - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. +class ComputeOperations: # pylint: disable=too-many-public-methods + """ + .. warning:: + **DO NOT** instantiate this class directly. - :ivar models: Alias to model classes used in this operation group. 
- :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`compute` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name, # type: str - workspace_name, # type: str - skip=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.PaginatedComputeResourcesList"] + self, resource_group_name: str, workspace_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.ComputeResource"]: """Gets computes in specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. 
:type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PaginatedComputeResourcesList or the result of - cls(response) + :return: An iterator like instance of either ComputeResource or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PaginatedComputeResourcesList] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PaginatedComputeResourcesList] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.PaginatedComputeResourcesList"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -683,16 +684,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("PaginatedComputeResourcesList", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -703,60 +703,63 @@ def get_next(next_link=None): return pipeline_response + return 
ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ComputeResource" + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> _models.ComputeResource: """Gets compute definition by its name. Any secrets (storage keys, service credentials, etc) are not returned - use 'keys' nested resource to get them. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComputeResource, or the result of cls(response) + :return: ComputeResource or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComputeResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComputeResource] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -764,99 +767,206 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, 
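# Hedged usage sketch for the list/get operations documented above, accessed through
# the client's `compute` attribute. The import path and client constructor shown here
# are assumptions for illustration (the regenerated client is vendored under a private
# package), and all resource identifiers are hypothetical.
from azure.identity import DefaultAzureCredential

from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices  # assumed path

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",  # hypothetical
)

# list() returns an ItemPaged[ComputeResource]; the pager requests next pages lazily.
for compute in client.compute.list(resource_group_name="my-rg", workspace_name="my-ws"):
    print(compute.name)

# get() returns a single ComputeResource without secrets; use the 'keys' nested
# resource (listKeys above) when credentials are needed.
cpu_cluster = client.compute.get(
    resource_group_name="my-rg", workspace_name="my-ws", compute_name="cpu-cluster"
)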
error_format=ARMErrorFormat) - deserialized = self._deserialize('ComputeResource', pipeline_response) + deserialized = self._deserialize("ComputeResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.ComputeResource" - **kwargs # type: Any - ): - # type: (...) -> "_models.ComputeResource" - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ComputeResource, IO], + **kwargs: Any + ) -> _models.ComputeResource: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(parameters, 'ComputeResource') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComputeResource] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "ComputeResource") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, 
pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ComputeResource', pipeline_response) + deserialized = self._deserialize("ComputeResource", pipeline_response) if response.status_code == 201: - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('ComputeResource', pipeline_response) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ComputeResource", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ComputeResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComputeResource]: + """Creates or updates compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify + that it does not exist yet. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Payload with Machine Learning compute definition. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComputeResource]: + """Creates or updates compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify + that it does not exist yet. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Payload with Machine Learning compute definition. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.ComputeResource" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.ComputeResource"] + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ComputeResource, IO], + **kwargs: Any + ) -> LROPoller[_models.ComputeResource]: """Creates or updates compute. This call will overwrite a compute if it exists. This is a nonrecoverable operation. If your intent is to create a new compute, do a GET first to verify that it does not exist yet. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. 
:type compute_name: str - :param parameters: Payload with Machine Learning compute definition. - :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource + :param parameters: Payload with Machine Learning compute definition. Is either a + ComputeResource type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ComputeResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -869,17 +979,17 @@ def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComputeResource] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -888,108 +998,215 @@ def begin_create_or_update( parameters=parameters, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ComputeResource', pipeline_response) + deserialized = self._deserialize("ComputeResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, 
get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" + } def _update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.ClusterUpdateParameters" - **kwargs # type: Any - ): - # type: (...) -> "_models.ComputeResource" - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ClusterUpdateParameters, IO], + **kwargs: Any + ) -> _models.ComputeResource: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(parameters, 'ClusterUpdateParameters') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComputeResource] = kwargs.pop("cls", None) - request = build_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "ClusterUpdateParameters") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._update_initial.metadata['url'], + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, 
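# Hedged sketch of the long-running create/update pattern documented above:
# begin_create_or_update returns an LROPoller[ComputeResource] driven by ARMPolling,
# and .result() blocks until provisioning completes. The AmlCompute /
# AmlComputeProperties / ScaleSettings model names are assumed examples of a concrete
# Compute subtype; any valid ComputeResource payload follows the same pattern.
def create_cpu_cluster(client, resource_group_name: str, workspace_name: str):
    models = client.compute.models  # models alias exposed on the operations group

    payload = models.ComputeResource(
        location="eastus",  # hypothetical region
        properties=models.AmlCompute(  # assumed Compute subtype
            properties=models.AmlComputeProperties(
                vm_size="STANDARD_DS3_V2",
                scale_settings=models.ScaleSettings(min_node_count=0, max_node_count=2),
            )
        ),
    )

    poller = client.compute.begin_create_or_update(
        resource_group_name=resource_group_name,
        workspace_name=workspace_name,
        compute_name="cpu-cluster",  # hypothetical
        parameters=payload,
    )
    # Blocks until the ARM operation finishes; pass polling=False to the call above
    # to receive the initial response without polling.
    return poller.result()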
error_format=ARMErrorFormat) - deserialized = self._deserialize('ComputeResource', pipeline_response) + deserialized = self._deserialize("ComputeResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" + } + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ClusterUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComputeResource]: + """Updates properties of a compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Additional parameters for cluster update. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComputeResource]: + """Updates properties of a compute. This call will overwrite a compute if it exists. This is a + nonrecoverable operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: Additional parameters for cluster update. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ComputeResource or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_update( self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.ClusterUpdateParameters" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.ComputeResource"] + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ClusterUpdateParameters, IO], + **kwargs: Any + ) -> LROPoller[_models.ComputeResource]: """Updates properties of a compute. This call will overwrite a compute if it exists. This is a nonrecoverable operation. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str - :param parameters: Additional parameters for cluster update. - :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters + :param parameters: Additional parameters for cluster update. Is either a + ClusterUpdateParameters type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ClusterUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -1002,17 +1219,17 @@ def begin_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComputeResource] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeResource"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComputeResource] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._update_initial( resource_group_name=resource_group_name, @@ -1021,106 +1238,121 @@ def begin_update( parameters=parameters, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ComputeResource', pipeline_response) + deserialized = self._deserialize("ComputeResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" + } def _delete_initial( # pylint: disable=inconsistent-return-statements self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - underlying_resource_action, # type: Union[str, "_models.UnderlyingResourceAction"] - 
**kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + resource_group_name: str, + workspace_name: str, + compute_name: str, + underlying_resource_action: Union[str, _models.UnderlyingResourceAction], + **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, - api_version=api_version, + subscription_id=self._config.subscription_id, underlying_resource_action=underlying_resource_action, - template_url=self._delete_initial.metadata['url'], + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements + def begin_delete( self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - underlying_resource_action, # type: Union[str, "_models.UnderlyingResourceAction"] - **kwargs # type: Any - ): 
- # type: (...) -> LROPoller[None] + resource_group_name: str, + workspace_name: str, + compute_name: str, + underlying_resource_action: Union[str, _models.UnderlyingResourceAction], + **kwargs: Any + ) -> LROPoller[None]: """Deletes specified Machine Learning compute. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :param underlying_resource_action: Delete the underlying compute if 'Delete', or detach the - underlying compute from workspace if 'Detach'. + underlying compute from workspace if 'Detach'. Known values are: "Delete" and "Detach". + Required. :type underlying_resource_action: str or ~azure.mgmt.machinelearningservices.models.UnderlyingResourceAction :keyword callable cls: A custom type or function that will be passed the direct response @@ -1133,101 +1365,188 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, underlying_resource_action=underlying_resource_action, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, 
client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}" + } - @distributed_trace + @overload def update_custom_services( # pylint: disable=inconsistent-return-statements self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - custom_services, # type: List["_models.CustomService"] - **kwargs # type: Any - ): - # type: (...) -> None + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: List[_models.CustomService], + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: """Updates the custom services list. The list of custom services provided shall be overwritten. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str - :param custom_services: New list of Custom Services. + :param custom_services: New list of Custom Services. Required. :type custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Required. + :type custom_services: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
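For orientation, the regenerated begin_delete above keeps the required underlying_resource_action argument, which decides whether the backing compute is deleted outright or only detached from the workspace. The sketch below is illustrative only: compute_ops stands in for an already-constructed instance of this operations class (how the client is wired up is outside this patch), and the string values come from the UnderlyingResourceAction enum referenced in the docstring.

# Illustrative sketch, not part of the generated code.
from typing import Any


def detach_compute(compute_ops: Any, rg: str, ws: str, compute: str) -> None:
    # "Detach" unlinks the compute from the workspace; "Delete" removes the
    # underlying resource as well (per the begin_delete docstring above).
    poller = compute_ops.begin_delete(
        resource_group_name=rg,
        workspace_name=ws,
        compute_name=compute,
        underlying_resource_action="Detach",
    )
    poller.result()  # LROPoller[None]: blocks until the ARM operation completes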
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update_custom_services( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + custom_services: Union[List[_models.CustomService], IO], + **kwargs: Any + ) -> None: + """Updates the custom services list. The list of custom services provided shall be overwritten. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param custom_services: New list of Custom Services. Is either a [CustomService] type or a IO + type. Required. + :type custom_services: list[~azure.mgmt.machinelearningservices.models.CustomService] or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) - _json = self._serialize.body(custom_services, '[CustomService]') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(custom_services, (IOBase, bytes)): + _content = custom_services + else: + _json = self._serialize.body(custom_services, "[CustomService]") request = build_update_custom_services_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update_custom_services.metadata['url'], + content=_content, + template_url=self.update_custom_services.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1238,63 +1557,72 @@ def update_custom_services( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - update_custom_services.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices"} # type: ignore - + update_custom_services.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/customServices" + } @distributed_trace def list_nodes( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.AmlComputeNodesInformation"] + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> Iterable["_models.AmlComputeNodeInformation"]: """Get the details (e.g IP address, port etc) of all the compute nodes in the compute. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either AmlComputeNodesInformation or the result of + :return: An iterator like instance of either AmlComputeNodeInformation or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.AmlComputeNodesInformation] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.AmlComputeNodeInformation] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.AmlComputeNodesInformation] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.AmlComputeNodesInformation"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_nodes_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_nodes.metadata['url'], + template_url=self.list_nodes.metadata["url"], + headers=_headers, + params=_params, ) request = 
_convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_nodes_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - compute_name=compute_name, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -1305,16 +1633,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("AmlComputeNodesInformation", pipeline_response) list_of_elem = deserialized.nodes if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1325,59 +1652,62 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list_nodes.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes"} # type: ignore + list_nodes.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listNodes" + } @distributed_trace def list_keys( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ComputeSecrets" + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> _models.ComputeSecrets: """Gets secrets related to Machine Learning compute (storage keys, service credentials, etc). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. 
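For context, list_nodes now returns an ItemPaged that re-applies the client's api-version when following nextLink, and list_keys returns a ComputeSecrets subtype. A minimal consumption sketch follows; compute_ops is again a placeholder for a constructed operations instance, and the node_id / node_state attribute names on AmlComputeNodeInformation are assumptions rather than something this patch defines.

# Illustrative sketch, not part of the generated code.
from typing import Any


def dump_compute_nodes(compute_ops: Any, rg: str, ws: str, compute: str) -> None:
    # Iterating the pager transparently follows nextLink with the pinned api-version.
    for node in compute_ops.list_nodes(rg, ws, compute):
        print(node.node_id, node.node_state)  # assumed attribute names

    secrets = compute_ops.list_keys(rg, ws, compute)
    # The concrete ComputeSecrets subtype depends on the compute type
    # (for example AKS vs. virtual machine compute).
    print(type(secrets).__name__)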
:type compute_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComputeSecrets, or the result of cls(response) + :return: ComputeSecrets or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComputeSecrets - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComputeSecrets"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComputeSecrets] = kwargs.pop("cls", None) - request = build_list_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_keys.metadata['url'], + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1385,77 +1715,78 @@ def list_keys( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ComputeSecrets', pipeline_response) + deserialized = self._deserialize("ComputeSecrets", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys"} # type: ignore - + list_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/listKeys" + } def _start_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_start_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_start_request( resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._start_initial.metadata['url'], + template_url=self._start_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - _start_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start"} # type: ignore - + _start_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start" + } @distributed_trace - def begin_start( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_start( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> LROPoller[None]: """Posts a start action to a compute instance. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. 
:type compute_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -1467,107 +1798,113 @@ def begin_start( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._start_initial( + raw_result = self._start_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_start.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start"} # type: ignore + begin_start.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/start" + } def _stop_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_stop_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_stop_request( resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._stop_initial.metadata['url'], + template_url=self._stop_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - _stop_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop"} # type: ignore - + _stop_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop" + } @distributed_trace - def begin_stop( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_stop( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> LROPoller[None]: """Posts a stop action to a compute instance. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. 
:type compute_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -1579,107 +1916,113 @@ def begin_stop( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._stop_initial( + raw_result = self._stop_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_stop.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop"} # type: ignore + begin_stop.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/stop" + } def _restart_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_restart_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_restart_request( resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._restart_initial.metadata['url'], + template_url=self._restart_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) - _restart_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart"} # type: ignore - + _restart_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart" + } @distributed_trace - def begin_restart( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_restart( + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> LROPoller[None]: """Posts a restart action to a compute instance. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. 
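The start/stop/restart trio in these hunks all return LROPoller[None]; the poller is what callers block on. A small sketch of driving those pollers, with compute_ops once more standing in for a constructed instance of this operations class (an assumption, since client construction is not shown in this patch):

# Illustrative sketch, not part of the generated code.
from typing import Any


def restart_compute_instance(compute_ops: Any, rg: str, ws: str, compute: str) -> None:
    # Each begin_* call submits the action (HTTP 202) and returns an LROPoller.
    poller = compute_ops.begin_stop(rg, ws, compute)
    poller.result()  # returns None for these operations; raises on failure

    compute_ops.begin_start(rg, ws, compute).result()

    # polling=False (NoPolling) returns right after the initial 202 is accepted.
    fire_and_forget = compute_ops.begin_restart(rg, ws, compute, polling=False)
    fire_and_forget.wait()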
:type compute_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -1691,100 +2034,189 @@ def begin_restart( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._restart_initial( + raw_result = self._restart_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_restart.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart"} # type: ignore + begin_restart.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/restart" + } - @distributed_trace + @overload def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.IdleShutdownSetting" - **kwargs # type: Any - ): - 
# type: (...) -> None + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.IdleShutdownSetting, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: """Updates the idle shutdown setting of a compute instance. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Required. :type parameters: ~azure.mgmt.machinelearningservices.models.IdleShutdownSetting + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.IdleShutdownSetting, IO], + **kwargs: Any + ) -> None: + """Updates the idle shutdown setting of a compute instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating idle shutdown setting of specified ComputeInstance. + Is either a IdleShutdownSetting type or a IO type. Required. 
+ :type parameters: ~azure.mgmt.machinelearningservices.models.IdleShutdownSetting or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: None or the result of cls(response) + :rtype: None + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) - _json = self._serialize.body(parameters, 'IdleShutdownSetting') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "IdleShutdownSetting") request = build_update_idle_shutdown_setting_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update_idle_shutdown_setting.metadata['url'], + content=_content, + template_url=self.update_idle_shutdown_setting.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1795,56 +2227,60 @@ def update_idle_shutdown_setting( # pylint: disable=inconsistent-return-stateme if cls: return cls(pipeline_response, None, {}) - update_idle_shutdown_setting.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting"} # type: ignore - + update_idle_shutdown_setting.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/updateIdleShutdownSetting" + } @distributed_trace def get_allowed_resize_sizes( - self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> "_models.VirtualMachineSizeListResult" + self, resource_group_name: str, workspace_name: str, compute_name: str, **kwargs: Any + ) -> _models.VirtualMachineSizeListResult: """Returns supported virtual machine sizes for resize. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: VirtualMachineSizeListResult, or the result of cls(response) + :return: VirtualMachineSizeListResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.VirtualMachineSizeListResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineSizeListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.VirtualMachineSizeListResult] = kwargs.pop("cls", None) - request = build_get_allowed_resize_sizes_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_allowed_resize_sizes.metadata['url'], + template_url=self.get_allowed_resize_sizes.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1852,90 +2288,112 @@ def get_allowed_resize_sizes( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response) + deserialized = self._deserialize("VirtualMachineSizeListResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_allowed_resize_sizes.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/getAllowedVmSizesForResize"} # type: ignore - + 
get_allowed_resize_sizes.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/getAllowedVmSizesForResize" + } def _resize_initial( # pylint: disable=inconsistent-return-statements self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.ResizeSchema" - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ResizeSchema, IO], + **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(parameters, 'ResizeSchema') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_resize_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "ResizeSchema") + + request = build_resize_request( resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._resize_initial.metadata['url'], + content=_content, + template_url=self._resize_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) if cls: return cls(pipeline_response, None, response_headers) - _resize_initial.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize"} # type: ignore - + _resize_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize" + } - @distributed_trace - def begin_resize( # pylint: disable=inconsistent-return-statements + @overload + def begin_resize( self, - resource_group_name, # type: str - workspace_name, # type: str - compute_name, # type: str - parameters, # type: "_models.ResizeSchema" - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: _models.ResizeSchema, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: """Updates the size of a Compute Instance. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param compute_name: Name of the Azure Machine Learning compute. + :param compute_name: Name of the Azure Machine Learning compute. Required. :type compute_name: str :param parameters: The object for updating VM size setting of specified Compute Instance. + Required. :type parameters: ~azure.mgmt.machinelearningservices.models.ResizeSchema + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -1946,45 +2404,127 @@ def begin_resize( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + def begin_resize( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Updates the size of a Compute Instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. 
+ :type compute_name: str + :param parameters: The object for updating VM size setting of specified Compute Instance. + Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_resize( + self, + resource_group_name: str, + workspace_name: str, + compute_name: str, + parameters: Union[_models.ResizeSchema, IO], + **kwargs: Any + ) -> LROPoller[None]: + """Updates the size of a Compute Instance. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param compute_name: Name of the Azure Machine Learning compute. Required. + :type compute_name: str + :param parameters: The object for updating VM size setting of specified Compute Instance. Is + either a ResizeSchema type or a IO type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.ResizeSchema or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._resize_initial( + raw_result = self._resize_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, compute_name=compute_name, parameters=parameters, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_resize.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize"} # type: ignore + begin_resize.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/computes/{computeName}/resize" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_data_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_data_containers_operations.py index be6151d8b59c..5fa3a4620e3b 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_data_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_data_containers_operations.py @@ -6,277 +6,283 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
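For callers of the regenerated compute operations above, a minimal usage sketch of the new begin_resize signature. This is not part of the patch: it assumes an already-constructed service client (called `client` here) whose compute operations group is exposed as `client.compute`, placeholder resource names, and that ResizeSchema exposes a `target_vm_size` field (verify against the generated models).

# Minimal caller-side sketch, not part of the generated code. Assumes `client` is an
# AzureMachineLearningServices instance created elsewhere with a credential and
# subscription_id; resource names below are placeholders.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

poller = client.compute.begin_resize(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    compute_name="my-compute-instance",
    parameters=_models.ResizeSchema(target_vm_size="Standard_DS3_v2"),  # field name assumed
)
poller.result()  # waits for the 202-accepted LRO to finish; the operation returns None

A raw JSON payload can be passed instead of the model through the new IO overload; as the dispatcher above shows, content_type then falls back to "application/json" when not supplied.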
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", 
max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # 
type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id, # type: str - 
resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class DataContainersOperations(object): - """DataContainersOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. 
- :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class DataContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`data_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - skip=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.DataContainerResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.DataContainer"]: """List data containers. List data containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataContainerResourceArmPaginatedResult or the - result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.DataContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either DataContainer or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.DataContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -287,16 +293,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("DataContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -307,61 +312,64 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -372,58 +380,60 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # 
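The list operation above returns an ItemPaged of DataContainer models, and continuation links are now re-issued with the client's api-version instead of rebuilding the original request. A minimal iteration sketch, assuming the same `client` as earlier and that the operation group is exposed as `client.data_containers` (per the class docstring above); resource names are placeholders.

# Sketch only: page through data containers; each yielded item is a DataContainer model.
for container in client.data_containers.list(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    list_view_type="ActiveOnly",  # optional; known values: "ActiveOnly", "ArchivedOnly", "All"
):
    print(container.name)

# by_page() exposes the underlying pages when the server-side continuation ($skip) matters.
pages = client.data_containers.list("my-rg", "my-workspace").by_page()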
type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}" + } @distributed_trace - def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.DataContainer" + def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.DataContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataContainer, or the result of cls(response) + :return: DataContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -431,72 +441,158 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('DataContainer', pipeline_response) + deserialized = self._deserialize("DataContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: ignore + get.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}" + } + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.DataContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.DataContainer" - **kwargs # type: Any - ): - # type: (...) -> "_models.DataContainer" + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.DataContainer, IO], + **kwargs: Any + ) -> _models.DataContainer: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.DataContainer + :param body: Container entity to create or update. Is either a DataContainer type or a IO type. + Required. 
+ :type body: ~azure.mgmt.machinelearningservices.models.DataContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataContainer, or the result of cls(response) + :return: DataContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataContainer] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'DataContainer') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DataContainer") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -505,15 +601,16 @@ def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('DataContainer', pipeline_response) + deserialized = self._deserialize("DataContainer", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('DataContainer', pipeline_response) + deserialized = self._deserialize("DataContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}"} # type: 
ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_data_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_data_versions_operations.py index ed94a2ae172d..ce81e9ca0a07 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_data_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_data_versions_operations.py @@ -6,326 +6,326 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
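create_or_update now accepts either a typed DataContainer or a binary/IO payload, with content_type defaulting to "application/json". A hedged sketch follows, reusing the `client` assumption from the earlier examples; DataContainerProperties, its data_type field, and the camelCase wire names are taken from the generated models and should be verified there.

# Sketch only: create (or update) a data container, first via the typed-model overload,
# then via the IO overload with a pre-serialized JSON body.
import io
import json

from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

created = client.data_containers.create_or_update(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    name="my-dataset",
    body=_models.DataContainer(
        properties=_models.DataContainerProperties(data_type="uri_file"),  # names assumed
    ),
)

raw = json.dumps({"properties": {"dataType": "uri_file"}}).encode("utf-8")
created = client.data_containers.create_or_update(
    "my-rg", "my-workspace", "my-dataset",
    body=io.BytesIO(raw),  # routes through the IOBase/bytes branch of the dispatcher above
)
print(created.name)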
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - order_by = kwargs.pop('order_by', None) # type: Optional[str] - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[str] - tags = kwargs.pop('tags', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - stage = kwargs.pop('stage', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if order_by is not None: - _query_parameters['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") if top is not None: - _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int') + _params["$top"] = _SERIALIZER.query("top", top, "int") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if tags is not None: - _query_parameters['$tags'] = _SERIALIZER.query("tags", tags, 'str') + _params["$tags"] = _SERIALIZER.query("tags", tags, "str") if list_view_type is not None: - _query_parameters['listViewType'] = 
_SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") if stage is not None: - _query_parameters['stage'] = _SERIALIZER.query("stage", stage, 'str') + _params["stage"] = _SERIALIZER.query("stage", stage, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - 
return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class DataVersionsOperations(object): - """DataVersionsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. 
- :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class DataVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`data_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - order_by=None, # type: Optional[str] - top=None, # type: Optional[int] - skip=None, # type: Optional[str] - tags=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - stage=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.DataVersionBaseResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.DataVersionBase"]: """List data versions in the data container. List data versions in the data container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Data container's name. + :param name: Data container's name. Required. :type name: str - :param order_by: Please choose OrderBy value from ['createdtime', 'modifiedtime']. + :param order_by: Please choose OrderBy value from ['createdtime', 'modifiedtime']. Default + value is None. :type order_by: str :param top: Top count of results, top count cannot be greater than the page size. If topCount > page size, results with be default page size count - will be returned. + will be returned. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, - ListViewType.All]View type for including/excluding (for example) archived entities. + ListViewType.All]View type for including/excluding (for example) archived entities. 
Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param stage: data stage. + :param stage: data stage. Default value is None. :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataVersionBaseResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either DataVersionBase or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.DataVersionBaseResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.DataVersionBase] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataVersionBaseResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataVersionBaseResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, tags=tags, list_view_type=list_view_type, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - name=name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - tags=tags, - list_view_type=list_view_type, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -336,16 +336,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("DataVersionBaseResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or 
None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -356,65 +355,67 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. 
:type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -425,62 +426,65 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.DataVersionBase" + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.DataVersionBase: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. 
:type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataVersionBase, or the result of cls(response) + :return: DataVersionBase or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataVersionBase"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataVersionBase] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -488,76 +492,168 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('DataVersionBase', pipeline_response) + deserialized = self._deserialize("DataVersionBase", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}" + } + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.DataVersionBase, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataVersionBase: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. 
+ :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DataVersionBase: + """Create or update version. + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DataVersionBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.DataVersionBase" - **kwargs # type: Any - ): - # type: (...) -> "_models.DataVersionBase" + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.DataVersionBase, IO], + **kwargs: Any + ) -> _models.DataVersionBase: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :param body: Version entity to create or update. Is either a DataVersionBase type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataVersionBase, or the result of cls(response) + :return: DataVersionBase or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataVersionBase"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'DataVersionBase') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataVersionBase] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DataVersionBase") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -566,15 +662,16 @@ def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('DataVersionBase', pipeline_response) + deserialized = self._deserialize("DataVersionBase", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('DataVersionBase', pipeline_response) + deserialized = self._deserialize("DataVersionBase", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/data/{name}/versions/{version}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_datastores_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_datastores_operations.py index d10b164ec9e2..169edd87a00b 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_datastores_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_datastores_operations.py @@ -6,332 +6,334 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, List, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, List, Optional, TypeVar - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - count = kwargs.pop('count', 30) # type: Optional[int] - is_default = kwargs.pop('is_default', None) # type: Optional[bool] - names = kwargs.pop('names', None) # type: Optional[List[str]] - search_text = kwargs.pop('search_text', None) # type: Optional[str] - order_by = kwargs.pop('order_by', None) # type: Optional[str] - order_by_asc = kwargs.pop('order_by_asc', False) # type: Optional[bool] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + count: int = 30, + is_default: Optional[bool] = None, + names: Optional[List[str]] = None, + search_text: Optional[str] = None, + order_by: Optional[str] = None, + order_by_asc: bool = False, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if count is not None: - _query_parameters['count'] = _SERIALIZER.query("count", count, 'int') + _params["count"] = _SERIALIZER.query("count", count, "int") if is_default is not None: - _query_parameters['isDefault'] = _SERIALIZER.query("is_default", is_default, 'bool') + _params["isDefault"] = _SERIALIZER.query("is_default", is_default, "bool") if names is not None: - _query_parameters['names'] = _SERIALIZER.query("names", names, '[str]', div=',') + _params["names"] = _SERIALIZER.query("names", names, "[str]", div=",") if search_text is not None: - _query_parameters['searchText'] = _SERIALIZER.query("search_text", 
search_text, 'str') + _params["searchText"] = _SERIALIZER.query("search_text", search_text, "str") if order_by is not None: - _query_parameters['orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + _params["orderBy"] = _SERIALIZER.query("order_by", order_by, "str") if order_by_asc is not None: - _query_parameters['orderByAsc'] = _SERIALIZER.query("order_by_asc", order_by_asc, 'bool') + _params["orderByAsc"] = _SERIALIZER.query("order_by_asc", order_by_asc, "bool") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - 
return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - skip_validation = kwargs.pop('skip_validation', False) # type: Optional[bool] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + skip_validation: bool = False, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip_validation is not None: - _query_parameters['skipValidation'] = _SERIALIZER.query("skip_validation", skip_validation, 'bool') + _params["skipValidation"] = _SERIALIZER.query("skip_validation", skip_validation, "bool") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_list_secrets_request( - subscription_id, # type: str - 
resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class DatastoresOperations(object): - """DatastoresOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class DatastoresOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`datastores` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - skip=None, # type: Optional[str] - count=30, # type: Optional[int] - is_default=None, # type: Optional[bool] - names=None, # type: Optional[List[str]] - search_text=None, # type: Optional[str] - order_by=None, # type: Optional[str] - order_by_asc=False, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.DatastoreResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + count: int = 30, + is_default: Optional[bool] = None, + names: Optional[List[str]] = None, + search_text: Optional[str] = None, + order_by: Optional[str] = None, + order_by_asc: bool = False, + **kwargs: Any + ) -> Iterable["_models.Datastore"]: """List datastores. List datastores. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param count: Maximum number of results to return. + :param count: Maximum number of results to return. Default value is 30. :type count: int - :param is_default: Filter down to the workspace default datastore. + :param is_default: Filter down to the workspace default datastore. Default value is None. :type is_default: bool - :param names: Names of datastores to return. + :param names: Names of datastores to return. Default value is None. :type names: list[str] - :param search_text: Text to search for in the datastore names. + :param search_text: Text to search for in the datastore names. Default value is None. :type search_text: str - :param order_by: Order by property (createdtime | modifiedtime | name). + :param order_by: Order by property (createdtime | modifiedtime | name). Default value is None. :type order_by: str - :param order_by_asc: Order by property in ascending order. + :param order_by_asc: Order by property in ascending order. Default value is False. 
:type order_by_asc: bool :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DatastoreResourceArmPaginatedResult or the result - of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.DatastoreResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Datastore or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Datastore] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DatastoreResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.DatastoreResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, count=count, is_default=is_default, @@ -339,26 +341,26 @@ def prepare_request(next_link=None): search_text=search_text, order_by=order_by, order_by_asc=order_by_asc, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - count=count, - is_default=is_default, - names=names, - search_text=search_text, - order_by=order_by, - order_by_asc=order_by_asc, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -369,16 +371,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("DatastoreResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: 
disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -389,61 +390,64 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: """Delete datastore. Delete datastore. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Datastore name. + :param name: Datastore name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -454,58 +458,60 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - 
delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}" + } @distributed_trace - def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.Datastore" + def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.Datastore: """Get datastore. Get datastore. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Datastore name. + :param name: Datastore name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Datastore, or the result of cls(response) + :return: Datastore or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Datastore - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Datastore"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Datastore] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -513,76 +519,168 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Datastore', pipeline_response) + deserialized = self._deserialize("Datastore", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}" + } + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.Datastore, + skip_validation: bool = False, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Datastore: + """Create or update datastore. + + Create or update datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Datastore name. Required. + :type name: str + :param body: Datastore entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Datastore + :param skip_validation: Flag to skip validation. Default value is False. + :type skip_validation: bool + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Datastore or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Datastore + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + skip_validation: bool = False, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Datastore: + """Create or update datastore. + + Create or update datastore. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Datastore name. Required. + :type name: str + :param body: Datastore entity to create or update. Required. + :type body: IO + :param skip_validation: Flag to skip validation. Default value is False. + :type skip_validation: bool + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Datastore or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Datastore + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.Datastore" - skip_validation=False, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> "_models.Datastore" + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.Datastore, IO], + skip_validation: bool = False, + **kwargs: Any + ) -> _models.Datastore: """Create or update datastore. Create or update datastore. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Datastore name. + :param name: Datastore name. Required. :type name: str - :param body: Datastore entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.Datastore - :param skip_validation: Flag to skip validation. + :param body: Datastore entity to create or update. Is either a Datastore type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.Datastore or IO + :param skip_validation: Flag to skip validation. Default value is False. :type skip_validation: bool + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Datastore, or the result of cls(response) + :return: Datastore or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Datastore - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Datastore"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Datastore] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'Datastore') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Datastore") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, + skip_validation=skip_validation, api_version=api_version, content_type=content_type, json=_json, - skip_validation=skip_validation, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -591,68 +689,72 @@ def create_or_update( raise HttpResponseError(response=response, model=error, 
error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('Datastore', pipeline_response) + deserialized = self._deserialize("Datastore", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('Datastore', pipeline_response) + deserialized = self._deserialize("Datastore", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}" + } @distributed_trace def list_secrets( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.DatastoreSecrets" + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.DatastoreSecrets: """Get datastore secrets. Get datastore secrets. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Datastore name. + :param name: Datastore name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DatastoreSecrets, or the result of cls(response) + :return: DatastoreSecrets or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DatastoreSecrets - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DatastoreSecrets"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DatastoreSecrets] = kwargs.pop("cls", None) - request = build_list_secrets_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_secrets.metadata['url'], + template_url=self.list_secrets.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -660,12 +762,13 @@ def list_secrets( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('DatastoreSecrets', pipeline_response) + deserialized = self._deserialize("DatastoreSecrets", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_secrets.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets"} # type: ignore - + list_secrets.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/datastores/{name}/listSecrets" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_environment_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_environment_containers_operations.py index b97f0bb6c787..f8a4292e2a87 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_environment_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_environment_containers_operations.py @@ -6,277 +6,285 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
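# Illustrative usage sketch for the regenerated DatastoresOperations above: it accepts
# either a typed model or a raw JSON stream for create_or_update, and secrets are fetched
# through the dedicated listSecrets POST action. Assumptions: `client` is an
# already-constructed AzureMachineLearningServices REST client exposing a `datastores`
# operation group; all resource names are placeholders.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models


def upsert_datastore_and_read_secrets(client, resource_group: str, workspace: str, name: str,
                                      datastore: _models.Datastore):
    # Typed-model path: the body is serialized with the 'Datastore' mapping and sent as JSON.
    created = client.datastores.create_or_update(
        resource_group_name=resource_group,
        workspace_name=workspace,
        name=name,
        body=datastore,          # a bytes/IO JSON payload is also accepted (content= path)
        skip_validation=False,
    )
    # Secrets never come back on the resource itself; they require the listSecrets call.
    secrets = client.datastores.list_secrets(resource_group, workspace, name)
    return created, secrets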
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class EnvironmentContainersOperations(object): - """EnvironmentContainersOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
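# Hypothetical standalone call against the regenerated request-builder contract shown
# above (normally the operation methods invoke these builders themselves). The module
# path follows the generated file; resource values are placeholders.
from azure.ai.ml._restclient.v2023_08_01_preview.operations._environment_containers_operations import (
    build_list_request,
)

request = build_list_request(
    resource_group_name="my-rg",
    workspace_name="my-ws",
    subscription_id="00000000-0000-0000-0000-000000000000",
    list_view_type="ActiveOnly",   # known values: "ActiveOnly", "ArchivedOnly", "All"
)
# The builder formats the path parameters into the URL template and returns an
# azure.core.rest.HttpRequest with api-version, listViewType and Accept already set.
assert request.method == "GET"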
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class EnvironmentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`environment_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - skip=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.EnvironmentContainerResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.EnvironmentContainer"]: """List environment containers. List environment containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either EnvironmentContainerResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either EnvironmentContainer or the result of + cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -287,16 +295,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("EnvironmentContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -307,61 +314,64 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -372,58 +382,62 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.EnvironmentContainer" + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.EnvironmentContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EnvironmentContainer, or the result of cls(response) + :return: EnvironmentContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -431,72 +445,158 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) 
return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}" + } + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.EnvironmentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.EnvironmentContainer" - **kwargs # type: Any - ): - # type: (...) -> "_models.EnvironmentContainer" + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.EnvironmentContainer, IO], + **kwargs: Any + ) -> _models.EnvironmentContainer: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
:type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :param body: Container entity to create or update. Is either a EnvironmentContainer type or a + IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EnvironmentContainer, or the result of cls(response) + :return: EnvironmentContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EnvironmentContainer] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'EnvironmentContainer') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentContainer") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -505,15 +605,16 @@ def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) if response.status_code == 201: - 
deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_environment_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_environment_versions_operations.py index 7424a9d73878..b4e7cfa97e00 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_environment_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_environment_versions_operations.py @@ -6,315 +6,315 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
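# Sketch of the IO overload introduced for create_or_update above: the body may be a
# bytes/stream JSON payload instead of a typed model, in which case it is passed through
# unserialized with Content-Type application/json. The JSON shape and resource names are
# assumptions for illustration; `client` is an already-constructed
# AzureMachineLearningServices REST client.
import io
import json

payload = io.BytesIO(
    json.dumps({"properties": {"description": "curated training environment", "isArchived": False}}).encode("utf-8")
)

container = client.environment_containers.create_or_update(
    resource_group_name="my-rg",
    workspace_name="my-ws",
    name="my-environment",
    body=payload,                      # isinstance(body, IOBase) -> sent as raw content
    content_type="application/json",
)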
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - order_by = kwargs.pop('order_by', None) # type: Optional[str] - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - stage = kwargs.pop('stage', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if order_by is not None: - _query_parameters['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") if top is not None: - _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int') + _params["$top"] = _SERIALIZER.query("top", top, "int") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") if stage is not None: - _query_parameters['stage'] = _SERIALIZER.query("stage", stage, 'str') + 
_params["stage"] = _SERIALIZER.query("stage", stage, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def 
build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class EnvironmentVersionsOperations(object): - """EnvironmentVersionsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. 
- :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class EnvironmentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`environment_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - order_by=None, # type: Optional[str] - top=None, # type: Optional[int] - skip=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - stage=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.EnvironmentVersionResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.EnvironmentVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :param stage: Stage for including/excluding (for example) archived entities. Takes priority - over listViewType. + over listViewType. Default value is None. 
:type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either EnvironmentVersionResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either EnvironmentVersion or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, list_view_type=list_view_type, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - name=name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - list_view_type=list_view_type, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -325,16 +325,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("EnvironmentVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + 
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -345,65 +344,67 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = 
pipeline_response.http_response if response.status_code not in [200, 204]: @@ -414,62 +415,65 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.EnvironmentVersion" + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.EnvironmentVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EnvironmentVersion, or the result of cls(response) + :return: EnvironmentVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = 
pipeline_response.http_response if response.status_code not in [200]: @@ -477,76 +481,168 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}" + } + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.EnvironmentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentVersion: + """Creates or updates an EnvironmentVersion. + + Creates or updates an EnvironmentVersion. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Name of EnvironmentVersion. This is case-sensitive. Required. + :type name: str + :param version: Version of EnvironmentVersion. Required. + :type version: str + :param body: Definition of EnvironmentVersion. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.EnvironmentVersion: + """Creates or updates an EnvironmentVersion. + Creates or updates an EnvironmentVersion. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Name of EnvironmentVersion. This is case-sensitive. Required. + :type name: str + :param version: Version of EnvironmentVersion. Required. + :type version: str + :param body: Definition of EnvironmentVersion. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: EnvironmentVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.EnvironmentVersion" - **kwargs # type: Any - ): - # type: (...) -> "_models.EnvironmentVersion" + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.EnvironmentVersion, IO], + **kwargs: Any + ) -> _models.EnvironmentVersion: """Creates or updates an EnvironmentVersion. Creates or updates an EnvironmentVersion. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Name of EnvironmentVersion. This is case-sensitive. + :param name: Name of EnvironmentVersion. This is case-sensitive. Required. :type name: str - :param version: Version of EnvironmentVersion. + :param version: Version of EnvironmentVersion. Required. :type version: str - :param body: Definition of EnvironmentVersion. - :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :param body: Definition of EnvironmentVersion. Is either a EnvironmentVersion type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
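A hedged usage sketch of the overloaded create_or_update shown here: the body may now be either an EnvironmentVersion model (serialized as "EnvironmentVersion") or a raw IO/bytes payload passed through as content. The `client` object, resource names, and the no-argument EnvironmentVersion construction are placeholder assumptions; model properties are omitted for brevity.

import io
import json

from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

# `client` is assumed to be an already-constructed AzureMachineLearningServices
# service client from this regenerated package; construction is out of scope here.

# 1) Typed model body: serialized to JSON by the operation.
env_version = _models.EnvironmentVersion()  # properties omitted / assumed defaults
result = client.environment_versions.create_or_update(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    name="my-environment",
    version="1",
    body=env_version,
)

# 2) Raw IO body: passed through unchanged; content_type defaults to "application/json".
raw = io.BytesIO(json.dumps({"properties": {}}).encode("utf-8"))  # placeholder payload
result = client.environment_versions.create_or_update(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    name="my-environment",
    version="1",
    body=raw,
)
print(result)  # EnvironmentVersion deserialized from the 200/201 response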
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EnvironmentVersion, or the result of cls(response) + :return: EnvironmentVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'EnvironmentVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EnvironmentVersion] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentVersion") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -555,15 +651,16 @@ def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/environments/{name}/versions/{version}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_features_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_features_operations.py index 1ebaf00512c5..be73a726a16a 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_features_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_features_operations.py @@ -6,248 +6,263 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - featureset_name, # type: str - featureset_version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - tags = kwargs.pop('tags', None) # type: Optional[str] - feature_name = kwargs.pop('feature_name', None) # type: Optional[str] - description = kwargs.pop('description', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - page_size = kwargs.pop('page_size', 1000) # type: Optional[int] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + featureset_name: str, + featureset_version: str, + subscription_id: str, + *, + skip: Optional[str] = None, + tags: Optional[str] = None, + feature_name: Optional[str] = None, + description: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 1000, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "featuresetName": _SERIALIZER.url("featureset_name", featureset_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "featuresetVersion": _SERIALIZER.url("featureset_version", featureset_version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "featuresetName": _SERIALIZER.url( + "featureset_name", featureset_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "featuresetVersion": _SERIALIZER.url("featureset_version", featureset_version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if tags is not None: - _query_parameters['tags'] = 
_SERIALIZER.query("tags", tags, 'str') + _params["tags"] = _SERIALIZER.query("tags", tags, "str") if feature_name is not None: - _query_parameters['featureName'] = _SERIALIZER.query("feature_name", feature_name, 'str') + _params["featureName"] = _SERIALIZER.query("feature_name", feature_name, "str") if description is not None: - _query_parameters['description'] = _SERIALIZER.query("description", description, 'str') + _params["description"] = _SERIALIZER.query("description", description, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") if page_size is not None: - _query_parameters['pageSize'] = _SERIALIZER.query("page_size", page_size, 'int') + _params["pageSize"] = _SERIALIZER.query("page_size", page_size, "int") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - featureset_name, # type: str - featureset_version, # type: str - feature_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + featureset_name: str, + featureset_version: str, + feature_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features/{featureName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features/{featureName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "featuresetName": _SERIALIZER.url("featureset_name", featureset_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "featuresetVersion": _SERIALIZER.url("featureset_version", featureset_version, 'str'), - "featureName": _SERIALIZER.url("feature_name", feature_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", 
subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "featuresetName": _SERIALIZER.url( + "featureset_name", featureset_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "featuresetVersion": _SERIALIZER.url("featureset_version", featureset_version, "str"), + "featureName": _SERIALIZER.url( + "feature_name", feature_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class FeaturesOperations(object): - """FeaturesOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class FeaturesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`features` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - featureset_name, # type: str - featureset_version, # type: str - skip=None, # type: Optional[str] - tags=None, # type: Optional[str] - feature_name=None, # type: Optional[str] - description=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - page_size=1000, # type: Optional[int] - **kwargs # type: Any - ): - # type: (...) 
-> Iterable["_models.FeatureResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + featureset_name: str, + featureset_version: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + feature_name: Optional[str] = None, + description: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 1000, + **kwargs: Any + ) -> Iterable["_models.Feature"]: """List Features. List Features. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param featureset_name: Featureset name. This is case-sensitive. + :param featureset_name: Featureset name. This is case-sensitive. Required. :type featureset_name: str - :param featureset_version: Featureset Version identifier. This is case-sensitive. + :param featureset_version: Featureset Version identifier. This is case-sensitive. Required. :type featureset_version: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str - :param feature_name: feature name. + :param feature_name: feature name. Default value is None. :type feature_name: str - :param description: Description of the featureset. + :param description: Description of the featureset. Default value is None. :type description: str :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, - ListViewType.All]View type for including/excluding (for example) archived entities. + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param page_size: Page size. + :param page_size: Page size. Default value is 1000. 
:type page_size: int :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FeatureResourceArmPaginatedResult or the result of - cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.FeatureResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Feature or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Feature] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeatureResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeatureResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, featureset_name=featureset_name, featureset_version=featureset_version, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, tags=tags, feature_name=feature_name, description=description, list_view_type=list_view_type, page_size=page_size, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - featureset_name=featureset_name, - featureset_version=featureset_version, - api_version=api_version, - skip=skip, - tags=tags, - feature_name=feature_name, - description=description, - list_view_type=list_view_type, - page_size=page_size, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -258,16 +273,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("FeatureResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - 
pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -278,69 +292,76 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features" + } @distributed_trace def get( self, - resource_group_name, # type: str - workspace_name, # type: str - featureset_name, # type: str - featureset_version, # type: str - feature_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.Feature" + resource_group_name: str, + workspace_name: str, + featureset_name: str, + featureset_version: str, + feature_name: str, + **kwargs: Any + ) -> _models.Feature: """Get feature. Get feature. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param featureset_name: Feature set name. This is case-sensitive. + :param featureset_name: Feature set name. This is case-sensitive. Required. :type featureset_name: str - :param featureset_version: Feature set version identifier. This is case-sensitive. + :param featureset_version: Feature set version identifier. This is case-sensitive. Required. :type featureset_version: str - :param feature_name: Feature Name. This is case-sensitive. + :param feature_name: Feature Name. This is case-sensitive. Required. 
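For reference, a minimal sketch (placeholder names, pre-built `client` assumed) of paging through features with the regenerated list() filters and fetching a single feature with get():

# `client` is assumed to be an already-constructed service client from this
# regenerated package; `features` is the attribute documented above.
pager = client.features.list(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    featureset_name="my-featureset",
    featureset_version="1",
    feature_name="transactions",   # optional server-side filter; placeholder
    list_view_type="ActiveOnly",   # known values: ActiveOnly, ArchivedOnly, All
    page_size=100,                 # default is 1000
)
for feature in pager:              # ItemPaged follows nextLink, re-applying the client's api-version
    print(feature)

one = client.features.get(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    featureset_name="my-featureset",
    featureset_version="1",
    feature_name="transactions",
)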
:type feature_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Feature, or the result of cls(response) + :return: Feature or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Feature - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Feature"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Feature] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, featureset_name=featureset_name, featureset_version=featureset_version, feature_name=feature_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -348,12 +369,13 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Feature', pipeline_response) + deserialized = self._deserialize("Feature", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features/{featureName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{featuresetName}/versions/{featuresetVersion}/features/{featureName}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featureset_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featureset_containers_operations.py index 92c60de052a5..b090326de315 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featureset_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featureset_containers_operations.py @@ -6,294 +6,296 @@ # Code generated by Microsoft (R) AutoRest Code Generator. 
# Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - tags = kwargs.pop('tags', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - page_size = kwargs.pop('page_size', 20) # type: Optional[int] - name = kwargs.pop('name', None) # type: Optional[str] - description = kwargs.pop('description', None) # type: Optional[str] - created_by = kwargs.pop('created_by', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + name: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if tags is not None: - _query_parameters['tags'] = _SERIALIZER.query("tags", tags, 'str') + _params["tags"] = _SERIALIZER.query("tags", tags, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") if page_size is not None: - _query_parameters['pageSize'] = _SERIALIZER.query("page_size", page_size, 'int') + _params["pageSize"] = _SERIALIZER.query("page_size", page_size, "int") if name is not None: - 
_query_parameters['name'] = _SERIALIZER.query("name", name, 'str') + _params["name"] = _SERIALIZER.query("name", name, "str") if description is not None: - _query_parameters['description'] = _SERIALIZER.query("description", description, 'str') + _params["description"] = _SERIALIZER.query("description", description, "str") if created_by is not None: - _query_parameters['createdBy'] = _SERIALIZER.query("created_by", created_by, 'str') + _params["createdBy"] = _SERIALIZER.query("created_by", created_by, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", 
accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_entity_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class FeaturesetContainersOperations(object): - """FeaturesetContainersOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. 
- :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class FeaturesetContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`featureset_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - skip=None, # type: Optional[str] - tags=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - page_size=20, # type: Optional[int] - name=None, # type: Optional[str] - description=None, # type: Optional[str] - created_by=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.FeaturesetContainerResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + name: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.FeaturesetContainer"]: """List featurestore entity containers. List featurestore entity containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, - ListViewType.All]View type for including/excluding (for example) archived entities. + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param page_size: page size. + :param page_size: page size. Default value is 20. :type page_size: int - :param name: name for the featureset. + :param name: name for the featureset. Default value is None. :type name: str - :param description: description for the feature set. 
+ :param description: description for the feature set. Default value is None. :type description: str - :param created_by: createdBy user name. + :param created_by: createdBy user name. Default value is None. :type created_by: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FeaturesetContainerResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either FeaturesetContainer or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.FeaturesetContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, tags=tags, list_view_type=list_view_type, @@ -301,26 +303,26 @@ def prepare_request(next_link=None): name=name, description=description, created_by=created_by, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - tags=tags, - list_view_type=list_view_type, - page_size=page_size, - name=name, - description=description, - created_by=created_by, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -331,16 +333,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("FeaturesetContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = 
cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -351,82 +352,81 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - 
response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> LROPoller[None]: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -438,95 +438,106 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } @distributed_trace def get_entity( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.FeaturesetContainer" + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.FeaturesetContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. 
+ :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: FeaturesetContainer, or the result of cls(response) + :return: FeaturesetContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.FeaturesetContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetContainer] = kwargs.pop("cls", None) - request = build_get_entity_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_entity.metadata['url'], + template_url=self.get_entity.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -534,100 +545,209 @@ def get_entity( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('FeaturesetContainer', pipeline_response) + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_entity.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}"} # type: ignore - + get_entity.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.FeaturesetContainer" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.FeaturesetContainer" - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetContainer"] + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.FeaturesetContainer, IO], + **kwargs: Any + ) -> _models.FeaturesetContainer: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'FeaturesetContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetContainer] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturesetContainer") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('FeaturesetContainer', pipeline_response) + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('FeaturesetContainer', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", 
response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.FeaturesetContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturesetContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturesetContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}"} # type: ignore + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturesetContainer]: + """Create or update container. + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturesetContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.FeaturesetContainer" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.FeaturesetContainer"] + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.FeaturesetContainer, IO], + **kwargs: Any + ) -> LROPoller[_models.FeaturesetContainer]: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetContainer + :param body: Container entity to create or update. Is either a FeaturesetContainer type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -640,17 +760,17 @@ def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetContainer] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetContainer"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetContainer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -659,29 +779,36 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('FeaturesetContainer', pipeline_response) + deserialized = self._deserialize("FeaturesetContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}" + } diff --git 
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featureset_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featureset_versions_operations.py
index 4ba6ac614859..60c610d2db85 100644
--- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featureset_versions_operations.py
+++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featureset_versions_operations.py
@@ -6,362 +6,357 @@
 # Code generated by Microsoft (R) AutoRest Code Generator.
 # Changes may cause incorrect behavior and will be lost if the code is regenerated.
 # --------------------------------------------------------------------------
-from typing import TYPE_CHECKING
-
-from msrest import Serializer
-
-from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
+import urllib.parse
+from io import IOBase
+from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload
+
+from azure.core.exceptions import (
+    ClientAuthenticationError,
+    HttpResponseError,
+    ResourceExistsError,
+    ResourceNotFoundError,
+    ResourceNotModifiedError,
+    map_error,
+)
 from azure.core.paging import ItemPaged
 from azure.core.pipeline import PipelineResponse
 from azure.core.pipeline.transport import HttpResponse
 from azure.core.polling import LROPoller, NoPolling, PollingMethod
 from azure.core.rest import HttpRequest
 from azure.core.tracing.decorator import distributed_trace
+from azure.core.utils import case_insensitive_dict
 from azure.mgmt.core.exceptions import ARMErrorFormat
 from azure.mgmt.core.polling.arm_polling import ARMPolling

 from .. import models as _models
-from .._vendor import _convert_request, _format_url_section
+from .._serialization import Serializer
+from .._vendor import _convert_request

-if TYPE_CHECKING:
-    # pylint: disable=unused-import,ungrouped-imports
-    from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union
-    T = TypeVar('T')
-    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
+T = TypeVar("T")
+ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]

 _SERIALIZER = Serializer()
 _SERIALIZER.client_side_validation = False

-# fmt: off
+

 def build_list_request(
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - tags = kwargs.pop('tags', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - page_size = kwargs.pop('page_size', 20) # type: Optional[int] - version_name = kwargs.pop('version_name', None) # type: Optional[str] - version = kwargs.pop('version', None) # type: Optional[str] - description = kwargs.pop('description', None) # type: Optional[str] - created_by = kwargs.pop('created_by', None) # type: Optional[str] - stage = kwargs.pop('stage', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + version_name: Optional[str] = None, + version: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + stage: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if tags is not None: - _query_parameters['tags'] = _SERIALIZER.query("tags", tags, 'str') + _params["tags"] = _SERIALIZER.query("tags", tags, "str") if list_view_type is not None: - _query_parameters['listViewType'] = 
_SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") if page_size is not None: - _query_parameters['pageSize'] = _SERIALIZER.query("page_size", page_size, 'int') + _params["pageSize"] = _SERIALIZER.query("page_size", page_size, "int") if version_name is not None: - _query_parameters['versionName'] = _SERIALIZER.query("version_name", version_name, 'str') + _params["versionName"] = _SERIALIZER.query("version_name", version_name, "str") if version is not None: - _query_parameters['version'] = _SERIALIZER.query("version", version, 'str') + _params["version"] = _SERIALIZER.query("version", version, "str") if description is not None: - _query_parameters['description'] = _SERIALIZER.query("description", description, 'str') + _params["description"] = _SERIALIZER.query("description", description, "str") if created_by is not None: - _query_parameters['createdBy'] = _SERIALIZER.query("created_by", created_by, 'str') + _params["createdBy"] = _SERIALIZER.query("created_by", created_by, "str") if stage is not None: - _query_parameters['stage'] = _SERIALIZER.query("stage", stage, 'str') + _params["stage"] = _SERIALIZER.query("stage", stage, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", 
min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters 
- _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = 
_SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_backfill_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_backfill_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - 
_header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class FeaturesetVersionsOperations(object): - """FeaturesetVersionsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class FeaturesetVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`featureset_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - skip=None, # type: Optional[str] - tags=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - page_size=20, # type: Optional[int] - version_name=None, # type: Optional[str] - version=None, # type: Optional[str] - description=None, # type: Optional[str] - created_by=None, # type: Optional[str] - stage=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.FeaturesetVersionResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + name: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + version_name: Optional[str] = None, + version: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + stage: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.FeaturesetVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Featureset name. 
This is case-sensitive. + :param name: Featureset name. This is case-sensitive. Required. :type name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, - ListViewType.All]View type for including/excluding (for example) archived entities. + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param page_size: page size. + :param page_size: page size. Default value is 20. :type page_size: int - :param version_name: name for the featureset version. + :param version_name: name for the featureset version. Default value is None. :type version_name: str - :param version: featureset version. + :param version: featureset version. Default value is None. :type version: str - :param description: description for the feature set version. + :param description: description for the feature set version. Default value is None. :type description: str - :param created_by: createdBy user name. + :param created_by: createdBy user name. Default value is None. :type created_by: str - :param stage: Specifies the featurestore stage. + :param stage: Specifies the featurestore stage. Default value is None. :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either FeaturesetVersionResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either FeaturesetVersion or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.FeaturesetVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, tags=tags, list_view_type=list_view_type, @@ -371,29 +366,26 @@ def prepare_request(next_link=None): description=description, created_by=created_by, 
stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - name=name, - api_version=api_version, - skip=skip, - tags=tags, - list_view_type=list_view_type, - page_size=page_size, - version_name=version_name, - version=version, - description=description, - created_by=created_by, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -404,16 +396,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("FeaturesetVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -424,87 +415,86 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) 
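# --- Illustrative sketch (not part of the generated patch) ---------------------
# Listing featureset versions through the renamed AzureMachineLearningServices
# client. Credential, subscription, resource group and workspace names are
# placeholders; the import path mirrors this vendored _restclient package.
from azure.identity import DefaultAzureCredential
from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

# The pager above now re-issues next-link requests with the client's api-version.
for fs_version in client.featureset_versions.list(
    resource_group_name="my-rg",
    workspace_name="my-ws",
    name="transactions",
    list_view_type="ActiveOnly",   # known values: "ActiveOnly", "ArchivedOnly", "All"
    page_size=20,
):
    print(fs_version.name, fs_version.id)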
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> LROPoller[None] + def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> LROPoller[None]: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -516,100 +506,110 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, 
get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.FeaturesetVersion" + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.FeaturesetVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: FeaturesetVersion, or the result of cls(response) + :return: FeaturesetVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.FeaturesetVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturesetVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -617,105 
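# --- Illustrative sketch (not part of the generated patch) ---------------------
# Deleting a featureset version with the LRO returned by begin_delete above
# (location-based ARM polling). `client` is assumed to be the
# AzureMachineLearningServices client constructed as in the earlier sketch.
poller = client.featureset_versions.begin_delete(
    resource_group_name="my-rg",
    workspace_name="my-ws",
    name="transactions",
    version="1",
)
poller.result()   # blocks until the service finishes the delete; returns None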
+617,220 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('FeaturesetVersion', pipeline_response) + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.FeaturesetVersion" - **kwargs # type: Any - ): - # type: (...) -> "_models.FeaturesetVersion" - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetVersion"] + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturesetVersion, IO], + **kwargs: Any + ) -> _models.FeaturesetVersion: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'FeaturesetVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetVersion] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturesetVersion") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: 
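# --- Illustrative sketch (not part of the generated patch) ---------------------
# Fetching one featureset version with get(). The regenerated error handling maps
# 401/404/409/304 to typed exceptions and attaches the deserialized ErrorResponse
# to HttpResponseError as its model. `client` is assumed as in the earlier sketch.
from azure.core.exceptions import HttpResponseError, ResourceNotFoundError

try:
    fs_version = client.featureset_versions.get(
        resource_group_name="my-rg",
        workspace_name="my-ws",
        name="transactions",
        version="1",
    )
    print(fs_version.id)
except ResourceNotFoundError:
    print("featureset version not found")
except HttpResponseError as err:
    print(err.model)   # deserialized ErrorResponse, when the service returned one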
map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('FeaturesetVersion', pipeline_response) + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('FeaturesetVersion', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}"} # type: ignore + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.FeaturesetVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturesetVersion]: + """Create or update version. + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturesetVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturesetVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturesetVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.FeaturesetVersion" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.FeaturesetVersion"] + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturesetVersion, IO], + **kwargs: Any + ) -> LROPoller[_models.FeaturesetVersion]: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Version entity to create or update. 
- :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersion + :param body: Version entity to create or update. Is either a FeaturesetVersion type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -728,17 +843,17 @@ def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersion] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetVersion"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetVersion] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -748,122 +863,238 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('FeaturesetVersion', pipeline_response) + deserialized = self._deserialize("FeaturesetVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, 
polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}" + } def _backfill_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.FeaturesetVersionBackfillRequest" - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.FeaturesetVersionBackfillResponse"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.FeaturesetVersionBackfillResponse"]] + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturesetVersionBackfillRequest, IO], + **kwargs: Any + ) -> Optional[_models.FeaturesetVersionBackfillResponse]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'FeaturesetVersionBackfillRequest') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.FeaturesetVersionBackfillResponse]] = kwargs.pop("cls", None) - request = build_backfill_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturesetVersionBackfillRequest") + + request = build_backfill_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._backfill_initial.metadata['url'], + content=_content, + template_url=self._backfill_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = 
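# --- Illustrative sketch (not part of the generated patch) ---------------------
# Creating or updating a featureset version with the overloaded
# begin_create_or_update above, using the model overload. The FeaturesetVersion /
# FeaturesetVersionProperties names come from this API version's models package;
# the chosen fields and values are placeholders/assumptions, not a full schema.
from azure.ai.ml._restclient.v2023_08_01_preview import models

body = models.FeaturesetVersion(
    properties=models.FeaturesetVersionProperties(
        description="daily transaction features",
        tags={"team": "fraud-detection"},
    )
)
poller = client.featureset_versions.begin_create_or_update(   # client as in earlier sketch
    resource_group_name="my-rg",
    workspace_name="my-ws",
    name="transactions",
    version="1",
    body=body,
)
created = poller.result()   # deserialized FeaturesetVersion once the LRO completes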
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('FeaturesetVersionBackfillResponse', pipeline_response) + deserialized = self._deserialize("FeaturesetVersionBackfillResponse", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _backfill_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill"} # type: ignore + _backfill_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill" + } + + @overload + def begin_backfill( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.FeaturesetVersionBackfillRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturesetVersionBackfillResponse]: + """Backfill. + Backfill. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Feature set version backfill request entity. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either FeaturesetVersionBackfillResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_backfill( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturesetVersionBackfillResponse]: + """Backfill. + + Backfill. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Feature set version backfill request entity. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturesetVersionBackfillResponse or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_backfill( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.FeaturesetVersionBackfillRequest" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.FeaturesetVersionBackfillResponse"] + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturesetVersionBackfillRequest, IO], + **kwargs: Any + ) -> LROPoller[_models.FeaturesetVersionBackfillResponse]: """Backfill. Backfill. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Feature set version backfill request entity. - :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillRequest + :param body: Feature set version backfill request entity. 
Is either a + FeaturesetVersionBackfillRequest type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -876,17 +1107,17 @@ def begin_backfill( result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturesetVersionBackfillResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturesetVersionBackfillResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturesetVersionBackfillResponse] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._backfill_initial( resource_group_name=resource_group_name, @@ -896,29 +1127,36 @@ def begin_backfill( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('FeaturesetVersionBackfillResponse', pipeline_response) + deserialized = self._deserialize("FeaturesetVersionBackfillResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return 
LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_backfill.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill"} # type: ignore + begin_backfill.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featuresets/{name}/versions/{version}/backfill" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featurestore_entity_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featurestore_entity_containers_operations.py index d783d8af54db..b5478b992e65 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featurestore_entity_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featurestore_entity_containers_operations.py @@ -6,294 +6,297 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
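# --- Illustrative sketch (not part of the generated patch) ---------------------
# The new IO overload of begin_backfill above also accepts a raw JSON payload
# (bytes or a stream) instead of a FeaturesetVersionBackfillRequest model. The
# wire field names in the payload below are assumptions for illustration only;
# `client` is assumed as in the earlier sketches.
import json

payload = json.dumps(
    {"displayName": "backfill-2023-08", "description": "manual backfill"}  # assumed field names
).encode("utf-8")

poller = client.featureset_versions.begin_backfill(
    resource_group_name="my-rg",
    workspace_name="my-ws",
    name="transactions",
    version="1",
    body=payload,
    content_type="application/json",
)
backfill_response = poller.result()   # FeaturesetVersionBackfillResponse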
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - tags = kwargs.pop('tags', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - page_size = kwargs.pop('page_size', 20) # type: Optional[int] - name = kwargs.pop('name', None) # type: Optional[str] - description = kwargs.pop('description', None) # type: Optional[str] - created_by = kwargs.pop('created_by', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + name: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if tags is not None: - _query_parameters['tags'] = _SERIALIZER.query("tags", tags, 'str') + _params["tags"] = _SERIALIZER.query("tags", tags, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") if page_size is not None: - _query_parameters['pageSize'] = _SERIALIZER.query("page_size", page_size, 'int') + _params["pageSize"] = _SERIALIZER.query("page_size", page_size, "int") if name is 
not None: - _query_parameters['name'] = _SERIALIZER.query("name", name, 'str') + _params["name"] = _SERIALIZER.query("name", name, "str") if description is not None: - _query_parameters['description'] = _SERIALIZER.query("description", description, 'str') + _params["description"] = _SERIALIZER.query("description", description, "str") if created_by is not None: - _query_parameters['createdBy'] = _SERIALIZER.query("created_by", created_by, 'str') + _params["createdBy"] = _SERIALIZER.query("created_by", created_by, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = 
_SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_entity_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class FeaturestoreEntityContainersOperations(object): - """FeaturestoreEntityContainersOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. 
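For orientation, the regenerated builders above replace the old _query_parameters and _header_parameters dicts with case_insensitive_dict copies of the caller's kwargs and substitute path values with plain str.format. The sketch below only exercises one of these module-level builders in isolation; the resource names and the subscription GUID are placeholders, not values taken from this patch.

    from azure.ai.ml._restclient.v2023_08_01_preview.operations._featurestore_entity_containers_operations import (
        build_get_entity_request,
    )

    # Build the HttpRequest without sending it; no client or credential is needed here.
    request = build_get_entity_request(
        resource_group_name="my-rg",                                # placeholder
        workspace_name="my-ws",                                     # placeholder
        name="my-entity",                                           # placeholder
        subscription_id="00000000-0000-0000-0000-000000000000",     # placeholder
    )
    print(request.method)             # "GET"
    print(request.url)                # ARM route with the path parameters substituted
    print(request.headers["Accept"])  # "application/json", set by the builder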
- :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class FeaturestoreEntityContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`featurestore_entity_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - skip=None, # type: Optional[str] - tags=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - page_size=20, # type: Optional[int] - name=None, # type: Optional[str] - description=None, # type: Optional[str] - created_by=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.FeaturestoreEntityContainerResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + name: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.FeaturestoreEntityContainer"]: """List featurestore entity containers. List featurestore entity containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, - ListViewType.All]View type for including/excluding (for example) archived entities. + ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param page_size: page size. + :param page_size: page size. Default value is 20. :type page_size: int - :param name: name for the featurestore entity. 
+ :param name: name for the featurestore entity. Default value is None. :type name: str - :param description: description for the featurestore entity. + :param description: description for the featurestore entity. Default value is None. :type description: str - :param created_by: createdBy user name. + :param created_by: createdBy user name. Default value is None. :type created_by: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either - FeaturestoreEntityContainerResourceArmPaginatedResult or the result of cls(response) + :return: An iterator like instance of either FeaturestoreEntityContainer or the result of + cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, tags=tags, list_view_type=list_view_type, @@ -301,26 +304,26 @@ def prepare_request(next_link=None): name=name, description=description, created_by=created_by, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - tags=tags, - list_view_type=list_view_type, - page_size=page_size, - name=name, - description=description, - created_by=created_by, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -331,16 +334,15 @@ 
def extract_data(pipeline_response): deserialized = self._deserialize("FeaturestoreEntityContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -351,82 +353,81 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) 
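As the hunk above shows, non-success responses are now run through failsafe_deserialize and the resulting ErrorResponse travels on the raised HttpResponseError as its model. A rough caller-side sketch, assuming client is an already constructed service client that exposes the featurestore_entity_containers group from this file; the resource names are placeholders:

    from azure.core.exceptions import HttpResponseError, ResourceNotFoundError

    try:
        entity = client.featurestore_entity_containers.get_entity(
            resource_group_name="my-rg", workspace_name="my-ws", name="missing-entity"
        )
    except ResourceNotFoundError as exc:
        # 404s are translated by map_error via the error_map before the generic path runs.
        print("not found:", exc.message)
    except HttpResponseError as exc:
        # exc.model is the ErrorResponse produced by failsafe_deserialize above.
        print(exc.status_code, exc.model)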
response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> LROPoller[None]: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -438,95 +439,106 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
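begin_delete still returns an LROPoller of None, driven by ARMPolling (the surrounding hunks keep final-state-via as location). A minimal usage sketch, again assuming an already constructed client; the resource names are placeholders:

    # Start the long-running delete and block until the service reports a terminal state.
    poller = client.featurestore_entity_containers.begin_delete(
        resource_group_name="my-rg", workspace_name="my-ws", name="my-entity"
    )
    poller.result()            # the operation itself yields no body, so this is None
    print(poller.status())     # for example "Succeeded"

    # The serialized state below can be fed back through the continuation_token
    # keyword documented above to rebuild the poller elsewhere.
    token = poller.continuation_token()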
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } @distributed_trace def get_entity( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.FeaturestoreEntityContainer" + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.FeaturestoreEntityContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: FeaturestoreEntityContainer, or the result of cls(response) + :return: FeaturestoreEntityContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityContainer] = kwargs.pop("cls", None) - request = build_get_entity_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_entity.metadata['url'], + template_url=self.get_entity.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -534,100 +546,209 @@ def get_entity( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('FeaturestoreEntityContainer', pipeline_response) + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_entity.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"} # type: ignore - + get_entity.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.FeaturestoreEntityContainer" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.FeaturestoreEntityContainer" - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityContainer"] + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.FeaturestoreEntityContainer, IO], + **kwargs: Any + ) -> _models.FeaturestoreEntityContainer: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'FeaturestoreEntityContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityContainer] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturestoreEntityContainer") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('FeaturestoreEntityContainer', pipeline_response) + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('FeaturestoreEntityContainer', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + 
response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.FeaturestoreEntityContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturestoreEntityContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturestoreEntityContainer or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"} # type: ignore + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturestoreEntityContainer]: + """Create or update container. + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
+ Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturestoreEntityContainer or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.FeaturestoreEntityContainer" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.FeaturestoreEntityContainer"] + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.FeaturestoreEntityContainer, IO], + **kwargs: Any + ) -> LROPoller[_models.FeaturestoreEntityContainer]: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer + :param body: Container entity to create or update. Is either a FeaturestoreEntityContainer type + or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -640,17 +761,17 @@ def begin_create_or_update( of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityContainer] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityContainer"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityContainer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -659,29 +780,36 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('FeaturestoreEntityContainer', pipeline_response) + deserialized = self._deserialize("FeaturestoreEntityContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}" + } diff --git 
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featurestore_entity_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featurestore_entity_versions_operations.py index 52cadfd8c150..a4e12d6588cb 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featurestore_entity_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_featurestore_entity_versions_operations.py @@ -6,318 +6,318 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - tags = kwargs.pop('tags', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - page_size = kwargs.pop('page_size', 20) # type: Optional[int] - version_name = kwargs.pop('version_name', None) # type: Optional[str] - version = kwargs.pop('version', None) # type: Optional[str] - description = kwargs.pop('description', None) # type: Optional[str] - created_by = kwargs.pop('created_by', None) # type: Optional[str] - stage = kwargs.pop('stage', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + version_name: Optional[str] = None, + version: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + stage: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if tags is not None: - _query_parameters['tags'] = _SERIALIZER.query("tags", tags, 'str') + _params["tags"] = _SERIALIZER.query("tags", tags, "str") if list_view_type is not None: - 
_query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") if page_size is not None: - _query_parameters['pageSize'] = _SERIALIZER.query("page_size", page_size, 'int') + _params["pageSize"] = _SERIALIZER.query("page_size", page_size, "int") if version_name is not None: - _query_parameters['versionName'] = _SERIALIZER.query("version_name", version_name, 'str') + _params["versionName"] = _SERIALIZER.query("version_name", version_name, "str") if version is not None: - _query_parameters['version'] = _SERIALIZER.query("version", version, 'str') + _params["version"] = _SERIALIZER.query("version", version, "str") if description is not None: - _query_parameters['description'] = _SERIALIZER.query("description", description, 'str') + _params["description"] = _SERIALIZER.query("description", description, "str") if created_by is not None: - _query_parameters['createdBy'] = _SERIALIZER.query("created_by", created_by, 'str') + _params["createdBy"] = _SERIALIZER.query("created_by", created_by, "str") if stage is not None: - _query_parameters['stage'] = _SERIALIZER.query("stage", stage, 'str') + _params["stage"] = _SERIALIZER.query("stage", stage, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class FeaturestoreEntityVersionsOperations(object): - """FeaturestoreEntityVersionsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. 
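As the replacement class docstrings in these hunks spell out, the operation groups are no longer meant to be instantiated by hand; they hang off the renamed service client as attributes such as featurestore_entity_versions. A hedged sketch of that access pattern, assuming the usual generated constructor (credential plus subscription ID), that the version package re-exports AzureMachineLearningServices, and that azure-identity is installed:

    from azure.identity import DefaultAzureCredential
    from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices

    client = AzureMachineLearningServices(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",   # placeholder
    )

    # list() returns an ItemPaged that follows nextLink transparently, re-applying
    # the client api-version as shown in the pagination hunks of this patch.
    pager = client.featurestore_entity_versions.list(
        resource_group_name="my-rg", workspace_name="my-ws", name="my-entity"
    )
    for version in pager:
        print(version.name)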
- - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class FeaturestoreEntityVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`featurestore_entity_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - skip=None, # type: Optional[str] - tags=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - page_size=20, # type: Optional[int] - version_name=None, # type: Optional[str] - version=None, # type: Optional[str] - description=None, # type: Optional[str] - created_by=None, # type: Optional[str] - stage=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.FeaturestoreEntityVersionResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + name: str, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + page_size: int = 20, + version_name: Optional[str] = None, + version: Optional[str] = None, + description: Optional[str] = None, + created_by: Optional[str] = None, + stage: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.FeaturestoreEntityVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Feature entity name. This is case-sensitive. + :param name: Feature entity name. This is case-sensitive. Required. :type name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, - ListViewType.All]View type for including/excluding (for example) archived entities. 
+ ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param page_size: page size. + :param page_size: page size. Default value is 20. :type page_size: int - :param version_name: name for the featurestore entity version. + :param version_name: name for the featurestore entity version. Default value is None. :type version_name: str - :param version: featurestore entity version. + :param version: featurestore entity version. Default value is None. :type version: str - :param description: description for the feature entity version. + :param description: description for the feature entity version. Default value is None. :type description: str - :param created_by: createdBy user name. + :param created_by: createdBy user name. Default value is None. :type created_by: str - :param stage: Specifies the featurestore stage. + :param stage: Specifies the featurestore stage. Default value is None. :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either - FeaturestoreEntityVersionResourceArmPaginatedResult or the result of cls(response) + :return: An iterator like instance of either FeaturestoreEntityVersion or the result of + cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, tags=tags, list_view_type=list_view_type, @@ -327,29 +327,26 @@ def prepare_request(next_link=None): description=description, created_by=created_by, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - name=name, - api_version=api_version, - skip=skip, - 
tags=tags, - list_view_type=list_view_type, - page_size=page_size, - version_name=version_name, - version=version, - description=description, - created_by=created_by, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -360,16 +357,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("FeaturestoreEntityVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -380,87 +376,86 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - 
): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> LROPoller[None]: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -472,100 +467,110 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, 
get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.FeaturestoreEntityVersion" + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.FeaturestoreEntityVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: FeaturestoreEntityVersion, or the result of cls(response) + :return: FeaturestoreEntityVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.FeaturestoreEntityVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = 
pipeline_response.http_response if response.status_code not in [200]: @@ -573,105 +578,220 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('FeaturestoreEntityVersion', pipeline_response) + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.FeaturestoreEntityVersion" - **kwargs # type: Any - ): - # type: (...) -> "_models.FeaturestoreEntityVersion" - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityVersion"] + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturestoreEntityVersion, IO], + **kwargs: Any + ) -> _models.FeaturestoreEntityVersion: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'FeaturestoreEntityVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityVersion] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "FeaturestoreEntityVersion") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # 
pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('FeaturestoreEntityVersion', pipeline_response) + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('FeaturestoreEntityVersion', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.FeaturestoreEntityVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturestoreEntityVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either FeaturestoreEntityVersion or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}"} # type: ignore + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.FeaturestoreEntityVersion]: + """Create or update version. + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either FeaturestoreEntityVersion or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.FeaturestoreEntityVersion" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.FeaturestoreEntityVersion"] + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.FeaturestoreEntityVersion, IO], + **kwargs: Any + ) -> LROPoller[_models.FeaturestoreEntityVersion]: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. 
:type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion + :param body: Version entity to create or update. Is either a FeaturestoreEntityVersion type or + a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -684,17 +804,17 @@ def begin_create_or_update( of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.FeaturestoreEntityVersion] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.FeaturestoreEntityVersion"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.FeaturestoreEntityVersion] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -704,29 +824,36 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('FeaturestoreEntityVersion', pipeline_response) + deserialized = self._deserialize("FeaturestoreEntityVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - 
deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/featurestoreEntities/{name}/versions/{version}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_jobs_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_jobs_operations.py index 46f40df1b1b5..3f9ffb9e8550 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_jobs_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_jobs_operations.py @@ -6,380 +6,375 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - job_type = kwargs.pop('job_type', None) # type: Optional[str] - tag = kwargs.pop('tag', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - asset_name = kwargs.pop('asset_name', None) # type: Optional[str] - scheduled = kwargs.pop('scheduled', None) # type: Optional[bool] - schedule_id = kwargs.pop('schedule_id', None) # type: Optional[str] - properties = kwargs.pop('properties', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + job_type: Optional[str] = None, + tag: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + asset_name: Optional[str] = None, + scheduled: Optional[bool] = None, + schedule_id: Optional[str] = None, + properties: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if job_type is not None: - _query_parameters['jobType'] = _SERIALIZER.query("job_type", job_type, 'str') + _params["jobType"] = _SERIALIZER.query("job_type", job_type, "str") if tag is not None: - _query_parameters['tag'] = _SERIALIZER.query("tag", tag, 'str') + _params["tag"] = _SERIALIZER.query("tag", tag, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + 
_params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") if asset_name is not None: - _query_parameters['assetName'] = _SERIALIZER.query("asset_name", asset_name, 'str') + _params["assetName"] = _SERIALIZER.query("asset_name", asset_name, "str") if scheduled is not None: - _query_parameters['scheduled'] = _SERIALIZER.query("scheduled", scheduled, 'bool') + _params["scheduled"] = _SERIALIZER.query("scheduled", scheduled, "bool") if schedule_id is not None: - _query_parameters['scheduleId'] = _SERIALIZER.query("schedule_id", schedule_id, 'str') + _params["scheduleId"] = _SERIALIZER.query("schedule_id", schedule_id, "str") if properties is not None: - _query_parameters['properties'] = _SERIALIZER.query("properties", properties, 'str') + _params["properties"] = _SERIALIZER.query("properties", properties, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "id": _SERIALIZER.url("id", id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - 
_query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "id": _SERIALIZER.url("id", id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_update_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - id, # 
type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "id": _SERIALIZER.url("id", id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PATCH", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "id": _SERIALIZER.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_cancel_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_cancel_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "id": _SERIALIZER.url("id", id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class JobsOperations(object): - """JobsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class JobsOperations: + """ + .. 
warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`jobs` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - skip=None, # type: Optional[str] - job_type=None, # type: Optional[str] - tag=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - asset_name=None, # type: Optional[str] - scheduled=None, # type: Optional[bool] - schedule_id=None, # type: Optional[str] - properties=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.JobBaseResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + job_type: Optional[str] = None, + tag: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + asset_name: Optional[str] = None, + scheduled: Optional[bool] = None, + schedule_id: Optional[str] = None, + properties: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.JobBase"]: """Lists Jobs in the workspace. Lists Jobs in the workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param job_type: Type of job to be returned. + :param job_type: Type of job to be returned. Default value is None. :type job_type: str - :param tag: Jobs returned will have this tag key. + :param tag: Jobs returned will have this tag key. Default value is None. :type tag: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param asset_name: Asset name the job's named output is registered with. + :param asset_name: Asset name the job's named output is registered with. Default value is None. :type asset_name: str - :param scheduled: Indicator whether the job is scheduled job. + :param scheduled: Indicator whether the job is scheduled job. Default value is None. :type scheduled: bool - :param schedule_id: The scheduled id for listing the job triggered from. + :param schedule_id: The scheduled id for listing the job triggered from. Default value is None. :type schedule_id: str :param properties: Comma-separated list of property names (and optionally values). 
Example: - prop1,prop2=value2. + prop1,prop2=value2. Default value is None. :type properties: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either JobBaseResourceArmPaginatedResult or the result of - cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.JobBaseResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either JobBase or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.JobBase] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.JobBaseResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobBaseResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, job_type=job_type, tag=tag, @@ -388,27 +383,26 @@ def prepare_request(next_link=None): scheduled=scheduled, schedule_id=schedule_id, properties=properties, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - job_type=job_type, - tag=tag, - list_view_type=list_view_type, - asset_name=asset_name, - scheduled=scheduled, - schedule_id=schedule_id, - properties=properties, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -419,16 +413,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("JobBaseResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) 
- pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -439,82 +432,81 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( 
+ "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete(self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any) -> LROPoller[None]: """Deletes a Job (asynchronous). Deletes a Job (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the Job. This is case-sensitive. + :param id: The name and identifier for the Job. This is case-sensitive. Required. :type id: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -526,95 +518,104 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}" + } @distributed_trace - def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.JobBase" + def get(self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any) -> _models.JobBase: """Gets a Job by name/id. Gets a Job by name/id. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. 
+ :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the Job. This is case-sensitive. + :param id: The name and identifier for the Job. This is case-sensitive. Required. :type id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: JobBase, or the result of cls(response) + :return: JobBase or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.JobBase - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobBase"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.JobBase] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -622,72 +623,158 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('JobBase', pipeline_response) + deserialized = self._deserialize("JobBase", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}" + } + + @overload + def update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.PartialJobBasePartialResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobBase: + """Updates a Job. + + Updates a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. 
This is case-sensitive. Required. + :type id: str + :param body: Job definition to apply during the operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialJobBasePartialResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobBase: + """Updates a Job. + + Updates a Job. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition to apply during the operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def update( self, - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - body, # type: "_models.PartialJobBasePartialResource" - **kwargs # type: Any - ): - # type: (...) -> "_models.JobBase" + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.PartialJobBasePartialResource, IO], + **kwargs: Any + ) -> _models.JobBase: """Updates a Job. Updates a Job. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the Job. This is case-sensitive. + :param id: The name and identifier for the Job. This is case-sensitive. Required. :type id: str - :param body: Job definition to apply during the operation. - :type body: ~azure.mgmt.machinelearningservices.models.PartialJobBasePartialResource + :param body: Job definition to apply during the operation. Is either a + PartialJobBasePartialResource type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialJobBasePartialResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: JobBase, or the result of cls(response) + :return: JobBase or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.JobBase - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobBase"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.JobBase] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'PartialJobBasePartialResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialJobBasePartialResource") request = build_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update.metadata['url'], + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -695,72 +782,152 @@ def update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('JobBase', pipeline_response) + deserialized = self._deserialize("JobBase", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore - + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}" + } - @distributed_trace + @overload def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - body, # type: "_models.JobBase" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.JobBase" + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.JobBase, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobBase: """Creates and executes a Job. Creates and executes a Job. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the Job. This is case-sensitive. + :param id: The name and identifier for the Job. This is case-sensitive. Required. :type id: str - :param body: Job definition object. + :param body: Job definition object. Required. :type body: ~azure.mgmt.machinelearningservices.models.JobBase + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.JobBase: + """Creates and executes a Job. + + Creates and executes a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: JobBase or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.JobBase + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_update( + self, resource_group_name: str, workspace_name: str, id: str, body: Union[_models.JobBase, IO], **kwargs: Any + ) -> _models.JobBase: + """Creates and executes a Job. + + Creates and executes a Job. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the Job. This is case-sensitive. Required. + :type id: str + :param body: Job definition object. Is either a JobBase type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.JobBase or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: JobBase, or the result of cls(response) + :return: JobBase or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.JobBase - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.JobBase"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.JobBase] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'JobBase') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "JobBase") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -769,88 +936,86 @@ def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('JobBase', pipeline_response) + deserialized = self._deserialize("JobBase", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('JobBase', pipeline_response) + deserialized = self._deserialize("JobBase", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}" + } def _cancel_initial( # 
pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_cancel_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_cancel_request( resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._cancel_initial.metadata['url'], + template_url=self._cancel_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _cancel_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel"} # type: ignore - + _cancel_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel" + } @distributed_trace - def begin_cancel( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_cancel(self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any) -> LROPoller[None]: """Cancels a Job (asynchronous). Cancels a Job (asynchronous). 
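For context on the surface shown here: the regenerated `begin_cancel` starts a long-running operation and hands back an `LROPoller[None]` that polls via the `Location` header. A minimal usage sketch, assuming a service client has already been constructed as `client`; the resource group, workspace, and job names are placeholder values, not taken from this patch:

    # Hedged illustration: "my-rg", "my-workspace", and "my-job" are placeholders.
    poller = client.jobs.begin_cancel(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        id="my-job",
    )
    poller.result()  # block until the service reports the cancel finished; the operation returns None
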
:param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the Job. This is case-sensitive. + :param id: The name and identifier for the Job. This is case-sensitive. Required. :type id: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -862,42 +1027,50 @@ def begin_cancel( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._cancel_initial( + raw_result = self._cancel_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_cancel.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel"} # type: ignore + 
begin_cancel.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/jobs/{id}/cancel" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_labeling_jobs_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_labeling_jobs_operations.py index 4b93211bc41e..46e081fd4009 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_labeling_jobs_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_labeling_jobs_operations.py @@ -6,399 +6,395 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - top = kwargs.pop('top', None) # type: Optional[int] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + top: Optional[int] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if top is not None: - _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int') + _params["$top"] = _SERIALIZER.query("top", top, "int") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "id": _SERIALIZER.url("id", id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "id": _SERIALIZER.url("id", id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "id": _SERIALIZER.url("id", id, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_export_labels_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_export_labels_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "id": _SERIALIZER.url("id", id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_pause_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "id": _SERIALIZER.url("id", id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_resume_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_resume_request( + resource_group_name: str, workspace_name: str, id: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "id": _SERIALIZER.url("id", id, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "id": _SERIALIZER.url("id", id, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class LabelingJobsOperations(object): - """LabelingJobsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class LabelingJobsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`labeling_jobs` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - skip=None, # type: Optional[str] - top=None, # type: Optional[int] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.LabelingJobResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + top: Optional[int] = None, + **kwargs: Any + ) -> Iterable["_models.LabelingJob"]: """Lists labeling jobs in the workspace. Lists labeling jobs in the workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param top: Number of labeling jobs to return. + :param top: Number of labeling jobs to return. Default value is None. 
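(For orientation, a minimal usage sketch of the paginated list operation shown in this hunk. The `client` object, its construction, and the resource names are assumptions and not part of this patch; only the `labeling_jobs.list` signature above is taken from the generated code.)

    def print_labeling_job_names(client, resource_group_name: str, workspace_name: str) -> None:
        # list() returns an azure.core.paging.ItemPaged; pages of
        # LabelingJobResourceArmPaginatedResult are fetched lazily as iteration
        # follows each next_link.
        for job in client.labeling_jobs.list(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            top=50,  # optional: number of labeling jobs to return, per the docstring above
        ):
            print(job.name)  # each item is a LabelingJob ARM resource
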
:type top: int :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either LabelingJobResourceArmPaginatedResult or the - result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.LabelingJobResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either LabelingJob or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJobResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.LabelingJobResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, top=top, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - top=top, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -409,16 +405,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("LabelingJobResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -429,61 +424,64 @@ def 
get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> None: """Delete a labeling job. Delete a labeling job. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the LabelingJob. + :param id: The name and identifier for the LabelingJob. Required. :type id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -494,58 +492,60 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore - + delete.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } @distributed_trace - def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.LabelingJob" + def get(self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any) -> _models.LabelingJob: """Gets a labeling job by name/id. Gets a labeling job by name/id. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the LabelingJob. + :param id: The name and identifier for the LabelingJob. Required. :type id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: LabelingJob, or the result of cls(response) + :return: LabelingJob or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.LabelingJob - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.LabelingJob"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJob] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -553,100 +553,207 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('LabelingJob', pipeline_response) + deserialized = self._deserialize("LabelingJob", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore - + get.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - body, # type: "_models.LabelingJob" - **kwargs # type: Any - ): - # type: (...) -> "_models.LabelingJob" - cls = kwargs.pop('cls', None) # type: ClsType["_models.LabelingJob"] + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.LabelingJob, IO], + **kwargs: Any + ) -> _models.LabelingJob: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'LabelingJob') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.LabelingJob] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "LabelingJob") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('LabelingJob', pipeline_response) + deserialized = self._deserialize("LabelingJob", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = 
self._deserialize('LabelingJob', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("LabelingJob", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.LabelingJob, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.LabelingJob]: + """Creates or updates a labeling job (asynchronous). + + Creates or updates a labeling job (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: LabelingJob definition object. 
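(A hedged sketch of driving the long-running create/update flow defined in this hunk. The `client` and `body` values are assumed to be prepared elsewhere; `body` may be a LabelingJob model or a raw IO payload, matching the two overloads shown here.)

    def create_or_update_labeling_job(client, resource_group_name: str, workspace_name: str, job_id: str, body):
        # begin_create_or_update returns an LROPoller; polling uses ARMPolling with
        # "final-state-via": "original-uri", as set in the method body further below.
        poller = client.labeling_jobs.begin_create_or_update(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            id=job_id,
            body=body,  # LabelingJob model or file-like/bytes payload
        )
        return poller.result()  # deserialized LabelingJob once the operation completes
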
Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either LabelingJob or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - body, # type: "_models.LabelingJob" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.LabelingJob"] + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.LabelingJob, IO], + **kwargs: Any + ) -> LROPoller[_models.LabelingJob]: """Creates or updates a labeling job (asynchronous). Creates or updates a labeling job (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the LabelingJob. + :param id: The name and identifier for the LabelingJob. Required. :type id: str - :param body: LabelingJob definition object. - :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob + :param body: LabelingJob definition object. Is either a LabelingJob type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.LabelingJob or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -658,17 +765,17 @@ def begin_create_or_update( :return: An instance of LROPoller that returns either LabelingJob or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.LabelingJob] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.LabelingJob"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.LabelingJob] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -677,105 +784,212 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('LabelingJob', pipeline_response) + deserialized = self._deserialize("LabelingJob", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}" + } def _export_labels_initial( self, - resource_group_name, # type: str - 
workspace_name, # type: str - id, # type: str - body, # type: "_models.ExportSummary" - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.ExportSummary"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExportSummary"]] + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.ExportSummary, IO], + **kwargs: Any + ) -> Optional[_models.ExportSummary]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ExportSummary') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ExportSummary]] = kwargs.pop("cls", None) - request = build_export_labels_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ExportSummary") + + request = build_export_labels_request( resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._export_labels_initial.metadata['url'], + content=_content, + template_url=self._export_labels_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ExportSummary', pipeline_response) + deserialized = self._deserialize("ExportSummary", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, 
response_headers) return deserialized - _export_labels_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels"} # type: ignore + _export_labels_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels" + } + + @overload + def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: _models.ExportSummary, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_export_labels( + self, + resource_group_name: str, + workspace_name: str, + id: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ExportSummary]: + """Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + Export labels from a labeling job (asynchronous). Using the URL in the Location header, the + status of the job export operation can be tracked. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param id: The name and identifier for the LabelingJob. Required. + :type id: str + :param body: The export summary. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
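(A usage sketch for the export flow in this hunk, under the assumption that `client` and an ExportSummary body have been created elsewhere; only the begin_export_labels signature and its location-based polling are taken from the generated code.)

    def export_labels(client, resource_group_name: str, workspace_name: str, job_id: str, export_body):
        # export_body is an ExportSummary model (or a raw IO payload, per the IO overload).
        poller = client.labeling_jobs.begin_export_labels(
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
            id=job_id,
            body=export_body,
        )
        # ARMPolling follows the Location header ("final-state-via": "location");
        # result() returns the completed ExportSummary.
        return poller.result()
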
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ExportSummary or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_export_labels( self, - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - body, # type: "_models.ExportSummary" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.ExportSummary"] + resource_group_name: str, + workspace_name: str, + id: str, + body: Union[_models.ExportSummary, IO], + **kwargs: Any + ) -> LROPoller[_models.ExportSummary]: """Export labels from a labeling job (asynchronous). Using the URL in the Location header, the status of the job export operation can be tracked. @@ -783,13 +997,17 @@ def begin_export_labels( status of the job export operation can be tracked. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the LabelingJob. + :param id: The name and identifier for the LabelingJob. Required. :type id: str - :param body: The export summary. - :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary + :param body: The export summary. Is either a ExportSummary type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ExportSummary or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -801,17 +1019,17 @@ def begin_export_labels( :return: An instance of LROPoller that returns either ExportSummary or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ExportSummary] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ExportSummary"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ExportSummary] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._export_labels_initial( resource_group_name=resource_group_name, @@ -820,82 +1038,92 @@ def begin_export_labels( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ExportSummary', pipeline_response) + deserialized = self._deserialize("ExportSummary", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_export_labels.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels"} # type: ignore + begin_export_labels.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/exportLabels" + } @distributed_trace def pause( - self, - 
resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.LabelingJobProperties" + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> _models.LabelingJobProperties: """Pause a labeling job. Pause a labeling job. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the LabelingJob. + :param id: The name and identifier for the LabelingJob. Required. :type id: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: LabelingJobProperties, or the result of cls(response) + :return: LabelingJobProperties or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.LabelingJobProperties - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.LabelingJobProperties"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJobProperties] = kwargs.pop("cls", None) - request = build_pause_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.pause.metadata['url'], + template_url=self.pause.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -903,91 +1131,91 @@ def pause( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('LabelingJobProperties', pipeline_response) + deserialized = self._deserialize("LabelingJobProperties", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - pause.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause"} # type: ignore - + pause.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/pause" + } def _resume_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.LabelingJobProperties"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.LabelingJobProperties"]] + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> Optional[_models.LabelingJobProperties]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_resume_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Optional[_models.LabelingJobProperties]] = kwargs.pop("cls", None) + + request = build_resume_request( resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._resume_initial.metadata['url'], + template_url=self._resume_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('LabelingJobProperties', pipeline_response) + deserialized = self._deserialize("LabelingJobProperties", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _resume_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume"} # type: ignore - + _resume_initial.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume" + } @distributed_trace def begin_resume( - self, - resource_group_name, # type: str - workspace_name, # type: str - id, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.LabelingJobProperties"] + self, resource_group_name: str, workspace_name: str, id: str, **kwargs: Any + ) -> LROPoller[_models.LabelingJobProperties]: """Resume a labeling job (asynchronous). Resume a labeling job (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param id: The name and identifier for the LabelingJob. + :param id: The name and identifier for the LabelingJob. Required. :type id: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -1001,45 +1229,52 @@ def begin_resume( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.LabelingJobProperties] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.LabelingJobProperties"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.LabelingJobProperties] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._resume_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, id=id, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('LabelingJobProperties', pipeline_response) + deserialized = self._deserialize("LabelingJobProperties", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return 
LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_resume.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume"} # type: ignore + begin_resume.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/labelingJobs/{id}/resume" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_managed_network_provisions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_managed_network_provisions_operations.py index 15848cf96864..686efff98b9b 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_managed_network_provisions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_managed_network_provisions_operations.py @@ -6,176 +6,275 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from io import IOBase +from typing import IO, Any, Callable, Dict, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off - -def build_provision_managed_network_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + + +def build_provision_managed_network_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class ManagedNetworkProvisionsOperations(object): - """ManagedNetworkProvisionsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
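(A sketch of exercising the regenerated operation group in this file. Only the request builder and the `_provision_managed_network_initial` helper appear in this diff, so the public poller method name `begin_provision_managed_network` and the optional-body default are assumptions inferred from those internals; `client` is likewise assumed to be an already-constructed AzureMachineLearningServices instance exposing the `managed_network_provisions` attribute described below.)

    def provision_managed_network(client, resource_group_name: str, workspace_name: str):
        # Omitting body relies on the optional ManagedNetworkProvisionOptions default (None)
        # shown in the _provision_managed_network_initial helper below.
        poller = client.managed_network_provisions.begin_provision_managed_network(  # method name assumed
            resource_group_name=resource_group_name,
            workspace_name=workspace_name,
        )
        return poller.result()  # ManagedNetworkProvisionStatus on success
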
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class ManagedNetworkProvisionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`managed_network_provisions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") def _provision_managed_network_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - body=None, # type: Optional["_models.ManagedNetworkProvisionOptions"] - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.ManagedNetworkProvisionStatus"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ManagedNetworkProvisionStatus"]] + resource_group_name: str, + workspace_name: str, + body: Optional[Union[_models.ManagedNetworkProvisionOptions, IO]] = None, + **kwargs: Any + ) -> Optional[_models.ManagedNetworkProvisionStatus]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - if body is not None: - _json = self._serialize.body(body, 'ManagedNetworkProvisionOptions') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ManagedNetworkProvisionStatus]] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = None + if body is not None: + _json = self._serialize.body(body, "ManagedNetworkProvisionOptions") + else: + _json = None - request = build_provision_managed_network_request_initial( - subscription_id=self._config.subscription_id, + request = build_provision_managed_network_request( resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._provision_managed_network_initial.metadata['url'], + content=_content, + template_url=self._provision_managed_network_initial.metadata["url"], + headers=_headers, + params=_params, ) request 
= _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ManagedNetworkProvisionStatus', pipeline_response) + deserialized = self._deserialize("ManagedNetworkProvisionStatus", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _provision_managed_network_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork"} # type: ignore + _provision_managed_network_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork" + } + + @overload + def begin_provision_managed_network( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[_models.ManagedNetworkProvisionOptions] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ManagedNetworkProvisionStatus]: + """Provisions the managed network of a machine learning workspace. + + Provisions the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: Managed Network Provisioning Options for a machine learning workspace. Default + value is None. + :type body: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionOptions + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ManagedNetworkProvisionStatus or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_provision_managed_network( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ManagedNetworkProvisionStatus]: + """Provisions the managed network of a machine learning workspace. + + Provisions the managed network of a machine learning workspace. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: Managed Network Provisioning Options for a machine learning workspace. Default + value is None. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ManagedNetworkProvisionStatus or the + result of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_provision_managed_network( self, - resource_group_name, # type: str - workspace_name, # type: str - body=None, # type: Optional["_models.ManagedNetworkProvisionOptions"] - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.ManagedNetworkProvisionStatus"] + resource_group_name: str, + workspace_name: str, + body: Optional[Union[_models.ManagedNetworkProvisionOptions, IO]] = None, + **kwargs: Any + ) -> LROPoller[_models.ManagedNetworkProvisionStatus]: """Provisions the managed network of a machine learning workspace. Provisions the managed network of a machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param body: Managed Network Provisioning Options for a machine learning workspace. - :type body: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionOptions + :param body: Managed Network Provisioning Options for a machine learning workspace. Is either a + ManagedNetworkProvisionOptions type or a IO type. 
Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionOptions or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -188,17 +287,17 @@ def begin_provision_managed_network( result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ManagedNetworkProvisionStatus] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedNetworkProvisionStatus"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ManagedNetworkProvisionStatus] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._provision_managed_network_initial( resource_group_name=resource_group_name, @@ -206,29 +305,36 @@ def begin_provision_managed_network( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ManagedNetworkProvisionStatus', pipeline_response) + deserialized = self._deserialize("ManagedNetworkProvisionStatus", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, 
get_long_running_output, polling_method) # type: ignore - begin_provision_managed_network.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork"} # type: ignore + begin_provision_managed_network.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/provisionManagedNetwork" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_managed_network_settings_rule_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_managed_network_settings_rule_operations.py index 7e896ec74211..7ed112ed3160 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_managed_network_settings_rule_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_managed_network_settings_rule_operations.py @@ -6,263 +6,265 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - rule_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, rule_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "ruleName": _SERIALIZER.url("rule_name", rule_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "ruleName": _SERIALIZER.url("rule_name", rule_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - rule_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, rule_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "ruleName": _SERIALIZER.url("rule_name", rule_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "ruleName": _SERIALIZER.url("rule_name", rule_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - rule_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, rule_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "ruleName": _SERIALIZER.url("rule_name", rule_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "ruleName": _SERIALIZER.url("rule_name", rule_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class ManagedNetworkSettingsRuleOperations(object): - """ManagedNetworkSettingsRuleOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. 
- :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class ManagedNetworkSettingsRuleOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`managed_network_settings_rule` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.OutboundRuleListResult"] + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> Iterable["_models.OutboundRuleBasicResource"]: """Lists the managed network outbound rules for a machine learning workspace. Lists the managed network outbound rules for a machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OutboundRuleListResult or the result of + :return: An iterator like instance of either OutboundRuleBasicResource or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.OutboundRuleListResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OutboundRuleListResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.OutboundRuleListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -273,16 +275,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("OutboundRuleListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -293,80 +294,80 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - 
) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - rule_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, rule_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, rule_name=rule_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } @distributed_trace - def begin_delete( # pylint: 
disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - rule_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, workspace_name: str, rule_name: str, **kwargs: Any + ) -> LROPoller[None]: """Deletes an outbound rule from the managed network of a machine learning workspace. Deletes an outbound rule from the managed network of a machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param rule_name: Name of the workspace managed network outbound rule. + :param rule_name: Name of the workspace managed network outbound rule. Required. :type rule_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -378,95 +379,106 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, rule_name=rule_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - 
deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - rule_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.OutboundRuleBasicResource" + self, resource_group_name: str, workspace_name: str, rule_name: str, **kwargs: Any + ) -> _models.OutboundRuleBasicResource: """Gets an outbound rule from the managed network of a machine learning workspace. Gets an outbound rule from the managed network of a machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param rule_name: Name of the workspace managed network outbound rule. + :param rule_name: Name of the workspace managed network outbound rule. Required. :type rule_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: OutboundRuleBasicResource, or the result of cls(response) + :return: OutboundRuleBasicResource or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OutboundRuleBasicResource"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OutboundRuleBasicResource] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, rule_name=rule_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -474,101 +486,206 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('OutboundRuleBasicResource', pipeline_response) + deserialized = self._deserialize("OutboundRuleBasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - rule_name, # type: str - body, # type: "_models.OutboundRuleBasicResource" - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.OutboundRuleBasicResource"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OutboundRuleBasicResource"]] + resource_group_name: str, + workspace_name: str, + rule_name: str, + body: Union[_models.OutboundRuleBasicResource, IO], + **kwargs: Any + ) -> Optional[_models.OutboundRuleBasicResource]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'OutboundRuleBasicResource') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.OutboundRuleBasicResource]] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "OutboundRuleBasicResource") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, rule_name=rule_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('OutboundRuleBasicResource', pipeline_response) + deserialized = self._deserialize("OutboundRuleBasicResource", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + rule_name: str, + body: _models.OutboundRuleBasicResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OutboundRuleBasicResource]: + """Creates or updates an outbound rule in the managed network of a machine learning workspace. + Creates or updates an outbound rule in the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param rule_name: Name of the workspace managed network outbound rule. Required. + :type rule_name: str + :param body: Outbound Rule to be created or updated in the managed network of a machine + learning workspace. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either OutboundRuleBasicResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + rule_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OutboundRuleBasicResource]: + """Creates or updates an outbound rule in the managed network of a machine learning workspace. + + Creates or updates an outbound rule in the managed network of a machine learning workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param rule_name: Name of the workspace managed network outbound rule. Required. + :type rule_name: str + :param body: Outbound Rule to be created or updated in the managed network of a machine + learning workspace. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OutboundRuleBasicResource or the result + of cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - rule_name, # type: str - body, # type: "_models.OutboundRuleBasicResource" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.OutboundRuleBasicResource"] + resource_group_name: str, + workspace_name: str, + rule_name: str, + body: Union[_models.OutboundRuleBasicResource, IO], + **kwargs: Any + ) -> LROPoller[_models.OutboundRuleBasicResource]: """Creates or updates an outbound rule in the managed network of a machine learning workspace. Creates or updates an outbound rule in the managed network of a machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param rule_name: Name of the workspace managed network outbound rule. + :param rule_name: Name of the workspace managed network outbound rule. Required. :type rule_name: str :param body: Outbound Rule to be created or updated in the managed network of a machine - learning workspace. 
- :type body: ~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource + learning workspace. Is either a OutboundRuleBasicResource type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -581,17 +698,17 @@ def begin_create_or_update( of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OutboundRuleBasicResource] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.OutboundRuleBasicResource"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OutboundRuleBasicResource] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -600,29 +717,36 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('OutboundRuleBasicResource', pipeline_response) + deserialized = self._deserialize("OutboundRuleBasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, 
raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundRules/{ruleName}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_model_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_model_containers_operations.py index 57a95dba1a69..647afc761d89 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_model_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_model_containers_operations.py @@ -6,285 +6,290 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
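# --- Illustrative usage (editor's sketch, not part of the generated patch) ---
# A minimal sketch of how the regenerated long-running operation above is
# typically consumed. `client` is assumed to be an already-constructed service
# client for this API version; the attribute name
# `managed_network_settings_rule`, the resource names, and the `outbound_rule`
# body are illustrative assumptions, not values taken from the patch.
def create_outbound_rule_example(client, outbound_rule):
    poller = client.managed_network_settings_rule.begin_create_or_update(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        rule_name="allow-pypi",
        body=outbound_rule,  # an OutboundRuleBasicResource model or a file-like IO payload
    )
    # LROPoller.result() blocks until the ARM operation reaches a terminal state.
    return poller.result()
# -----------------------------------------------------------------------------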
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - count = kwargs.pop('count', None) # type: Optional[int] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + count: Optional[int] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if count is not None: - _query_parameters['count'] = _SERIALIZER.query("count", count, 'int') + _params["count"] = _SERIALIZER.query("count", count, "int") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - 
**kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class ModelContainersOperations(object): - """ModelContainersOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
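# --- Illustrative sketch (editor's note, not part of the generated patch) ----
# The rewritten request builders above now take keyword-only query options,
# format the URL template directly, and return an azure.core.rest.HttpRequest.
# A condensed, standalone version of that pattern follows; the URL and the
# parameter values are assumptions for illustration only.
from azure.core.rest import HttpRequest
from azure.core.utils import case_insensitive_dict

def build_example_get_request(subscription_id: str) -> HttpRequest:
    # Headers and query parameters are collected case-insensitively.
    _headers = case_insensitive_dict({})
    _params = case_insensitive_dict({})
    _params["api-version"] = "2023-08-01-preview"
    _headers["Accept"] = "application/json"
    # URL templates are formatted in place instead of via _format_url_section.
    _url = "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/operations"
    _url = _url.format(subscriptionId=subscription_id)
    return HttpRequest(method="GET", url=_url, params=_params, headers=_headers)
# -----------------------------------------------------------------------------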
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class ModelContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`model_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - skip=None, # type: Optional[str] - count=None, # type: Optional[int] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ModelContainerResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + count: Optional[int] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.ModelContainer"]: """List model containers. List model containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param count: Maximum number of results to return. + :param count: Maximum number of results to return. Default value is None. :type count: int - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ModelContainerResourceArmPaginatedResult or the - result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either ModelContainer or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, count=count, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - count=count, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -295,16 +300,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ModelContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -315,61 +319,64 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -380,58 +387,60 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': 
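# --- Illustrative sketch (editor's note, not part of the generated patch) ----
# The regenerated pager above re-parses the service-returned next link and
# re-applies the client's configured api-version before issuing the follow-up
# GET. The same logic in isolation; the next_link value below is a made-up
# example, not output from the service.
import urllib.parse
from azure.core.utils import case_insensitive_dict

next_link = "https://management.azure.com/subscriptions/xxx/models?$skipToken=abc&api-version=2022-10-01"
parsed = urllib.parse.urlparse(next_link)
next_params = case_insensitive_dict(
    {
        key: [urllib.parse.quote(v) for v in value]
        for key, value in urllib.parse.parse_qs(parsed.query).items()
    }
)
next_params["api-version"] = "2023-08-01-preview"  # the client's api-version wins
follow_up_url = urllib.parse.urljoin(next_link, parsed.path)
# -----------------------------------------------------------------------------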
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}" + } @distributed_trace - def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ModelContainer" + def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.ModelContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ModelContainer, or the result of cls(response) + :return: ModelContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -439,72 +448,158 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ModelContainer', pipeline_response) + deserialized = self._deserialize("ModelContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}" + } + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ModelContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelContainer: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelContainer or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.ModelContainer" - **kwargs # type: Any - ): - # type: (...) -> "_models.ModelContainer" + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.ModelContainer, IO], + **kwargs: Any + ) -> _models.ModelContainer: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. 
Required. :type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer + :param body: Container entity to create or update. Is either a ModelContainer type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ModelContainer, or the result of cls(response) + :return: ModelContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ModelContainer] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'ModelContainer') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelContainer") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -513,15 +608,16 @@ def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('ModelContainer', pipeline_response) + deserialized = self._deserialize("ModelContainer", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('ModelContainer', pipeline_response) + deserialized = self._deserialize("ModelContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return 
cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_model_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_model_versions_operations.py index 0b7d0aa63605..e336acf13c43 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_model_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_model_versions_operations.py @@ -6,374 +6,368 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
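# --- Illustrative usage (editor's sketch, not part of the generated patch) ---
# With the Union[ModelContainer, IO] signature introduced above, the same
# create_or_update call accepts either a typed model (serialized by the client)
# or a pre-serialized payload. `client` and the resource names are assumptions
# for illustration.
import io
import json

def upsert_model_container_example(client, model_container):
    # Pass a ModelContainer model; the client serializes it to JSON.
    created = client.model_containers.create_or_update(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        name="my-model",
        body=model_container,
    )
    # Or pass a pre-serialized file-like payload with an explicit content type.
    payload = io.BytesIO(json.dumps({"properties": {}}).encode("utf-8"))
    updated = client.model_containers.create_or_update(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        name="my-model",
        body=payload,
        content_type="application/json",
    )
    return created, updated
# -----------------------------------------------------------------------------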
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - order_by = kwargs.pop('order_by', None) # type: Optional[str] - top = kwargs.pop('top', None) # type: Optional[int] - version = kwargs.pop('version', None) # type: Optional[str] - description = kwargs.pop('description', None) # type: Optional[str] - offset = kwargs.pop('offset', None) # type: Optional[int] - tags = kwargs.pop('tags', None) # type: Optional[str] - properties = kwargs.pop('properties', None) # type: Optional[str] - feed = kwargs.pop('feed', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - stage = kwargs.pop('stage', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + order_by: Optional[str] = None, + top: Optional[int] = None, + version: Optional[str] = None, + description: Optional[str] = None, + offset: Optional[int] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + feed: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if order_by is not None: - _query_parameters['$orderBy'] = 
_SERIALIZER.query("order_by", order_by, 'str') + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") if top is not None: - _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int') + _params["$top"] = _SERIALIZER.query("top", top, "int") if version is not None: - _query_parameters['version'] = _SERIALIZER.query("version", version, 'str') + _params["version"] = _SERIALIZER.query("version", version, "str") if description is not None: - _query_parameters['description'] = _SERIALIZER.query("description", description, 'str') + _params["description"] = _SERIALIZER.query("description", description, "str") if offset is not None: - _query_parameters['offset'] = _SERIALIZER.query("offset", offset, 'int') + _params["offset"] = _SERIALIZER.query("offset", offset, "int") if tags is not None: - _query_parameters['tags'] = _SERIALIZER.query("tags", tags, 'str') + _params["tags"] = _SERIALIZER.query("tags", tags, "str") if properties is not None: - _query_parameters['properties'] = _SERIALIZER.query("properties", properties, 'str') + _params["properties"] = _SERIALIZER.query("properties", properties, "str") if feed is not None: - _query_parameters['feed'] = _SERIALIZER.query("feed", feed, 'str') + _params["feed"] = _SERIALIZER.query("feed", feed, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") if stage is not None: - _query_parameters['stage'] = _SERIALIZER.query("stage", stage, 'str') + _params["stage"] = _SERIALIZER.query("stage", stage, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_package_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_package_request( + resource_group_name: str, workspace_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class ModelVersionsOperations(object): - """ModelVersionsOperations operations. - - You should not instantiate this class directly. 
Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class ModelVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`model_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - skip=None, # type: Optional[str] - order_by=None, # type: Optional[str] - top=None, # type: Optional[int] - version=None, # type: Optional[str] - description=None, # type: Optional[str] - offset=None, # type: Optional[int] - tags=None, # type: Optional[str] - properties=None, # type: Optional[str] - feed=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - stage=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ModelVersionResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + name: str, + skip: Optional[str] = None, + order_by: Optional[str] = None, + top: Optional[int] = None, + version: Optional[str] = None, + description: Optional[str] = None, + offset: Optional[int] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + feed: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.ModelVersion"]: """List model versions. List model versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Model name. This is case-sensitive. + :param name: Model name. This is case-sensitive. Required. :type name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. 
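# Usage sketch: with this regeneration the operation group is reached through the
# service client's `model_versions` attribute rather than by instantiating
# ModelVersionsOperations directly. The import path and constructor arguments
# below are assumptions based on the generated package layout in this PR.
from azure.identity import DefaultAzureCredential
from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)
model_versions = client.model_versions  # ModelVersionsOperations attached by the client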
+ :param top: Maximum number of records to return. Default value is None. :type top: int - :param version: Model version. + :param version: Model version. Default value is None. :type version: str - :param description: Model description. + :param description: Model description. Default value is None. :type description: str - :param offset: Number of initial results to skip. + :param offset: Number of initial results to skip. Default value is None. :type offset: int :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param properties: Comma-separated list of property names (and optionally values). Example: - prop1,prop2=value2. + prop1,prop2=value2. Default value is None. :type properties: str - :param feed: Name of the feed. + :param feed: Name of the feed. Default value is None. :type feed: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType - :param stage: Model stage. + :param stage: Model stage. Default value is None. :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ModelVersionResourceArmPaginatedResult or the - result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either ModelVersion or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, order_by=order_by, top=top, @@ -385,31 +379,26 @@ def prepare_request(next_link=None): feed=feed, list_view_type=list_view_type, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - 
subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - name=name, - api_version=api_version, - skip=skip, - order_by=order_by, - top=top, - version=version, - description=description, - offset=offset, - tags=tags, - properties=properties, - feed=feed, - list_view_type=list_view_type, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -420,16 +409,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ModelVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -440,65 +428,67 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> None: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. 
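# Usage sketch: list() returns an ItemPaged that transparently follows the next
# link (re-applying the client's api-version on each page request), so callers
# simply iterate. `client` is the AzureMachineLearningServices instance sketched
# above; the resource names and filter values are placeholders.
for model_version in client.model_versions.list(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="<model-name>",
    list_view_type="ActiveOnly",
    top=10,
):
    print(model_version.name)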
:type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -509,62 +499,65 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ModelVersion" + self, resource_group_name: str, workspace_name: str, name: str, version: str, **kwargs: Any + ) -> _models.ModelVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. 
:type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ModelVersion, or the result of cls(response) + :return: ModelVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -572,76 +565,168 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ModelVersion', pipeline_response) + deserialized = self._deserialize("ModelVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}" + } + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.ModelVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelVersion: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. 
+ :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.ModelVersion: + """Create or update version. + + Create or update version. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: ModelVersion or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.ModelVersion" - **kwargs # type: Any - ): - # type: (...) -> "_models.ModelVersion" + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.ModelVersion, IO], + **kwargs: Any + ) -> _models.ModelVersion: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion + :param body: Version entity to create or update. Is either a ModelVersion type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ModelVersion, or the result of cls(response) + :return: ModelVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ModelVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ModelVersion] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelVersion") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: @@ -650,108 +735,218 @@ def create_or_update( raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('ModelVersion', pipeline_response) + deserialized = self._deserialize("ModelVersion", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('ModelVersion', pipeline_response) + deserialized = self._deserialize("ModelVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) + return cls(pipeline_response, deserialized, {}) # type: ignore - return deserialized - - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}"} # type: ignore + return deserialized # type: ignore + create_or_update.metadata = { + "url": 
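# Usage sketch: create_or_update now accepts either a ModelVersion model
# (serialized to JSON) or a raw IO/bytes payload sent as-is. The property names
# on ModelVersionProperties below are assumptions used only for illustration;
# `client` is the service client sketched earlier.
from azure.ai.ml._restclient.v2023_08_01_preview import models

body = models.ModelVersion(
    properties=models.ModelVersionProperties(
        model_type="custom_model",
        model_uri="azureml://datastores/workspaceblobstore/paths/my-model/",
    )
)
created = client.model_versions.create_or_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="<model-name>",
    version="1",
    body=body,
)
# Alternatively, pass an open binary file object as `body` together with
# content_type="application/json" to send the payload unmodified.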
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}" + } def _package_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.PackageRequest" - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.PackageResponse"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.PackageResponse"]] + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.PackageRequest, IO], + **kwargs: Any + ) -> Optional[_models.PackageResponse]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PackageRequest') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.PackageResponse]] = kwargs.pop("cls", None) - request = build_package_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PackageRequest") + + request = build_package_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._package_initial.metadata['url'], + content=_content, + template_url=self._package_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('PackageResponse', pipeline_response) + deserialized = self._deserialize("PackageResponse", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - 
response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _package_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package"} # type: ignore + _package_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package" + } + + @overload + def begin_package( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: _models.PackageRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_package( + self, + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.PackageResponse]: + """Model Version Package operation. + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Container name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. 
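# Usage sketch: begin_package is a long-running operation; the returned
# LROPoller uses ARMPolling (final-state-via: location) and result() blocks
# until a PackageResponse is available. The PackageRequest fields shown here
# are assumptions for illustration; `client` is the service client sketched earlier.
from azure.ai.ml._restclient.v2023_08_01_preview import models

poller = client.model_versions.begin_package(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="<model-name>",
    version="1",
    body=models.PackageRequest(
        target_environment_id="<environment-arm-id>",
        inferencing_server=models.AzureMLOnlineInferencingServer(),
    ),
)
package_response = poller.result()  # PackageResponse once packaging completes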
+ :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_package( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.PackageRequest" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.PackageResponse"] + resource_group_name: str, + workspace_name: str, + name: str, + version: str, + body: Union[_models.PackageRequest, IO], + **kwargs: Any + ) -> LROPoller[_models.PackageResponse]: """Model Version Package operation. Model Version Package operation. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Container name. This is case-sensitive. + :param name: Container name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Package operation request body. - :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest + :param body: Package operation request body. Is either a PackageRequest type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -764,17 +959,17 @@ def begin_package( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.PackageResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PackageResponse] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._package_initial( resource_group_name=resource_group_name, @@ -784,29 +979,36 @@ def begin_package( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('PackageResponse', pipeline_response) + deserialized = self._deserialize("PackageResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_package.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package"} # type: ignore + begin_package.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/models/{name}/versions/{version}/package" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_online_deployments_operations.py 
b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_online_deployments_operations.py index f52465e2cebf..ba531e69933c 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_online_deployments_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_online_deployments_operations.py @@ -6,435 +6,463 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - order_by = kwargs.pop('order_by', None) # type: Optional[str] - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if order_by is not None: - _query_parameters['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") if top is not None: - _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int') + _params["$top"] = _SERIALIZER.query("top", top, "int") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str 
- **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), - "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), - "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url( + "endpoint_name", endpoint_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "deploymentName": _SERIALIZER.url( + "deployment_name", deployment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PATCH", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def 
build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url( + "endpoint_name", endpoint_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "deploymentName": _SERIALIZER.url( + "deployment_name", deployment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - 
_header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_get_logs_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), - "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # 
type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_list_skus_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - count = kwargs.pop('count', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + subscription_id: str, + *, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/skus") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/skus", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), - "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), + "deploymentName": _SERIALIZER.url("deployment_name", deployment_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if count 
is not None: - _query_parameters['count'] = _SERIALIZER.query("count", count, 'int') + _params["count"] = _SERIALIZER.query("count", count, "int") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class OnlineDeploymentsOperations(object): - """OnlineDeploymentsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class OnlineDeploymentsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`online_deployments` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - order_by=None, # type: Optional[str] - top=None, # type: Optional[int] - skip=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.OnlineDeploymentTrackedResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.OnlineDeployment"]: """List Inference Endpoint Deployments. List Inference Endpoint Deployments. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Top of list. + :param top: Top of list. Default value is None. 
:type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OnlineDeploymentTrackedResourceArmPaginatedResult - or the result of cls(response) + :return: An iterator like instance of either OnlineDeployment or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.OnlineDeploymentTrackedResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OnlineDeploymentTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineDeploymentTrackedResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - endpoint_name=endpoint_name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -445,16 +473,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("OnlineDeploymentTrackedResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # 
pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -465,87 +492,86 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - 
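The regenerated list operation above returns an ItemPaged of OnlineDeployment and, when following the service's nextLink, now issues a plain GET against the continuation URL with the client's api-version instead of rebuilding the original request. A minimal consumption sketch follows; the import path and constructor arguments (credential, subscription_id) are assumptions based on how earlier versions of this vendored client are typically constructed, and are not shown in this diff — only the online_deployments attribute and the list parameters are confirmed here.

from azure.identity import DefaultAzureCredential

# Assumed export location and constructor signature for the renamed client.
from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",       # placeholder
)

# list() returns an ItemPaged; iterating it transparently follows nextLink pages.
pager = client.online_deployments.list(
    resource_group_name="<resource-group>",    # placeholder
    workspace_name="<workspace>",              # placeholder
    endpoint_name="<endpoint>",                # placeholder
    top=10,                                    # optional page-size hint
)
for deployment in pager:
    print(deployment.name)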
response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> LROPoller[None]: """Delete Inference Endpoint Deployment (asynchronous). Delete Inference Endpoint Deployment (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param deployment_name: Inference Endpoint Deployment name. + :param deployment_name: Inference Endpoint Deployment name. Required. :type deployment_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -557,100 +583,110 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.OnlineDeployment" + self, resource_group_name: str, workspace_name: str, endpoint_name: str, deployment_name: str, **kwargs: Any + ) -> _models.OnlineDeployment: """Get Inference Deployment Deployment. 
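begin_delete above wraps the 202-plus-Location long-running pattern in an LROPoller[None]; callers can block on result() or hold the poller and resume it later from a continuation token. A short sketch, reusing the client constructed in the earlier list sketch (all resource names are placeholders):

# Assumes `client` from the earlier sketch.
poller = client.online_deployments.begin_delete(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    endpoint_name="<endpoint>",
    deployment_name="<deployment>",
)
poller.result()                      # blocks until the LRO completes; returns None

# Alternatively, capture a continuation token to resume polling in another process.
token = poller.continuation_token()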
Get Inference Deployment Deployment. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param deployment_name: Inference Endpoint Deployment name. + :param deployment_name: Inference Endpoint Deployment name. Required. :type deployment_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: OnlineDeployment, or the result of cls(response) + :return: OnlineDeployment or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.OnlineDeployment - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineDeployment"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OnlineDeployment] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -658,106 +694,219 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('OnlineDeployment', pipeline_response) + deserialized = self._deserialize("OnlineDeployment", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}" + } def 
_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - body, # type: "_models.PartialMinimalTrackedResourceWithSku" - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.OnlineDeployment"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OnlineDeployment"]] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSku, IO], + **kwargs: Any + ) -> Optional[_models.OnlineDeployment]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PartialMinimalTrackedResourceWithSku') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.OnlineDeployment]] = kwargs.pop("cls", None) - request = build_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithSku") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._update_initial.metadata['url'], + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('OnlineDeployment', pipeline_response) + deserialized = self._deserialize("OnlineDeployment", pipeline_response) if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - 
response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}" + } + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.PartialMinimalTrackedResourceWithSku, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineDeployment]: + """Update Online Deployment (asynchronous). + Update Online Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineDeployment]: + """Update Online Deployment (asynchronous). + + Update Online Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. 
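_update_initial above now accepts either a PartialMinimalTrackedResourceWithSku model or a raw IO/bytes payload: model bodies are serialized to JSON, while IOBase/bytes bodies are passed through unchanged as content. The sketch below exercises the raw-payload path via the begin_update wrapper defined next; the JSON shape (a tags patch) is an assumption about the partial-resource schema, not something this diff confirms, and `client` is the instance from the earlier sketch.

import json

# Raw bytes are sent as-is (the operation checks isinstance(body, (IOBase, bytes))).
patch_payload = json.dumps({"tags": {"stage": "canary"}}).encode("utf-8")  # assumed shape

poller = client.online_deployments.begin_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    endpoint_name="<endpoint>",
    deployment_name="<deployment>",
    body=patch_payload,
    content_type="application/json",
)
updated = poller.result()            # final OnlineDeployment deserialized by the poller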
+ Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_update( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - body, # type: "_models.PartialMinimalTrackedResourceWithSku" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.OnlineDeployment"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSku, IO], + **kwargs: Any + ) -> LROPoller[_models.OnlineDeployment]: """Update Online Deployment (asynchronous). Update Online Deployment (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str - :param deployment_name: Inference Endpoint Deployment name. + :param deployment_name: Inference Endpoint Deployment name. Required. :type deployment_name: str - :param body: Online Endpoint entity to apply during operation. - :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku + :param body: Online Endpoint entity to apply during operation. Is either a + PartialMinimalTrackedResourceWithSku type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSku or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -770,17 +919,17 @@ def begin_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineDeployment"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OnlineDeployment] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._update_initial( resource_group_name=resource_group_name, @@ -790,122 +939,241 @@ def begin_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('OnlineDeployment', pipeline_response) + deserialized = self._deserialize("OnlineDeployment", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - body, # type: 
"_models.OnlineDeployment" - **kwargs # type: Any - ): - # type: (...) -> "_models.OnlineDeployment" - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineDeployment"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.OnlineDeployment, IO], + **kwargs: Any + ) -> _models.OnlineDeployment: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'OnlineDeployment') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OnlineDeployment] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "OnlineDeployment") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('OnlineDeployment', pipeline_response) + deserialized = self._deserialize("OnlineDeployment", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('OnlineDeployment', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", 
response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("OnlineDeployment", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}" + } + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.OnlineDeployment, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineDeployment]: + """Create or update Inference Endpoint Deployment (asynchronous). + + Create or update Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Inference Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineDeployment + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineDeployment]: + """Create or update Inference Endpoint Deployment (asynchronous). + + Create or update Inference Endpoint Deployment (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
+ :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: Inference Endpoint Deployment name. Required. + :type deployment_name: str + :param body: Inference Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineDeployment or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - body, # type: "_models.OnlineDeployment" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.OnlineDeployment"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.OnlineDeployment, IO], + **kwargs: Any + ) -> LROPoller[_models.OnlineDeployment]: """Create or update Inference Endpoint Deployment (asynchronous). Create or update Inference Endpoint Deployment (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param deployment_name: Inference Endpoint Deployment name. + :param deployment_name: Inference Endpoint Deployment name. Required. :type deployment_name: str - :param body: Inference Endpoint entity to apply during operation. - :type body: ~azure.mgmt.machinelearningservices.models.OnlineDeployment + :param body: Inference Endpoint entity to apply during operation. Is either a OnlineDeployment + type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineDeployment or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -918,17 +1186,17 @@ def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineDeployment] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineDeployment"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OnlineDeployment] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -938,93 +1206,191 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('OnlineDeployment', pipeline_response) + deserialized = self._deserialize("OnlineDeployment", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}" + } - @distributed_trace + @overload def 
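begin_create_or_update above follows the same overload pattern (model or IO body) but polls with final-state-via original-uri and deserializes the resulting OnlineDeployment. A sketch assuming an OnlineDeployment model prepared elsewhere; its required fields (location, properties, sku, and so on) are not shown in this diff and are left out here, and `client` is the instance from the earlier sketch.

# `deployment` is an _models.OnlineDeployment built elsewhere; its constructor
# arguments are assumptions outside the scope of this diff.
poller = client.online_deployments.begin_create_or_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    endpoint_name="<endpoint>",
    deployment_name="<deployment>",
    body=deployment,
)
created = poller.result()            # OnlineDeployment returned once provisioning finishes
print(created.name)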
get_logs( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - body, # type: "_models.DeploymentLogsRequest" - **kwargs # type: Any - ): - # type: (...) -> "_models.DeploymentLogs" + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: _models.DeploymentLogsRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DeploymentLogs: """Polls an Endpoint operation. Polls an Endpoint operation. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param deployment_name: The name and identifier for the endpoint. + :param deployment_name: The name and identifier for the endpoint. Required. :type deployment_name: str - :param body: The request containing parameters for retrieving logs. + :param body: The request containing parameters for retrieving logs. Required. :type body: ~azure.mgmt.machinelearningservices.models.DeploymentLogsRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DeploymentLogs, or the result of cls(response) + :return: DeploymentLogs or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def get_logs( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.DeploymentLogs: + """Polls an Endpoint operation. + + Polls an Endpoint operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The name and identifier for the endpoint. Required. + :type deployment_name: str + :param body: The request containing parameters for retrieving logs. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentLogs or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def get_logs( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + body: Union[_models.DeploymentLogsRequest, IO], + **kwargs: Any + ) -> _models.DeploymentLogs: + """Polls an Endpoint operation. + + Polls an Endpoint operation. 
+ + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Inference endpoint name. Required. + :type endpoint_name: str + :param deployment_name: The name and identifier for the endpoint. Required. + :type deployment_name: str + :param body: The request containing parameters for retrieving logs. Is either a + DeploymentLogsRequest type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DeploymentLogsRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: DeploymentLogs or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.DeploymentLogs + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DeploymentLogs"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'DeploymentLogsRequest') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DeploymentLogs] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DeploymentLogsRequest") request = build_get_logs_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.get_logs.metadata['url'], + content=_content, + template_url=self.get_logs.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1032,87 +1398,95 @@ def get_logs( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('DeploymentLogs', pipeline_response) + deserialized = 
self._deserialize("DeploymentLogs", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_logs.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs"} # type: ignore - + get_logs.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/getLogs" + } @distributed_trace def list_skus( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - deployment_name, # type: str - count=None, # type: Optional[int] - skip=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.SkuResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + deployment_name: str, + count: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.SkuResource"]: """List Inference Endpoint Deployment Skus. List Inference Endpoint Deployment Skus. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Inference endpoint name. + :param endpoint_name: Inference endpoint name. Required. :type endpoint_name: str - :param deployment_name: Inference Endpoint Deployment name. + :param deployment_name: Inference Endpoint Deployment name. Required. :type deployment_name: str - :param count: Number of Skus to be retrieved in a page of results. + :param count: Number of Skus to be retrieved in a page of results. Default value is None. :type count: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. 
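As a hedged companion to the regenerated list_skus pager, the calling pattern would look roughly as follows (same assumed `client` and `online_deployments` attribute as in the sketch above; ItemPaged follows next_link transparently, now re-applying the client's api-version as shown in the prepare_request change below):

    # Iterate every SkuResource for a deployment; paging is handled by ItemPaged.
    for sku in client.online_deployments.list_skus(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        endpoint_name="my-endpoint",
        deployment_name="blue",
        count=10,   # optional page size
    ):
        print(sku)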
:type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either SkuResourceArmPaginatedResult or the result of - cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.SkuResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either SkuResource or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.SkuResource] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.SkuResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.SkuResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_skus_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, deployment_name=deployment_name, - api_version=api_version, + subscription_id=self._config.subscription_id, count=count, skip=skip, - template_url=self.list_skus.metadata['url'], + api_version=api_version, + template_url=self.list_skus.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_skus_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - endpoint_name=endpoint_name, - deployment_name=deployment_name, - api_version=api_version, - count=count, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -1123,16 +1497,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("SkuResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -1143,8 +1516,8 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list_skus.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/skus"} # type: ignore + list_skus.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/deployments/{deploymentName}/skus" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_online_endpoints_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_online_endpoints_operations.py index 95dab3fd1410..236751d04e73 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_online_endpoints_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_online_endpoints_operations.py @@ -6,457 +6,450 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - name = kwargs.pop('name', None) # type: Optional[str] - count = kwargs.pop('count', None) # type: Optional[int] - compute_type = kwargs.pop('compute_type', None) # type: Optional[Union[str, "_models.EndpointComputeType"]] - skip = kwargs.pop('skip', None) # type: Optional[str] - tags = kwargs.pop('tags', None) # type: Optional[str] - properties = kwargs.pop('properties', None) # type: Optional[str] - order_by = kwargs.pop('order_by', None) # type: Optional[Union[str, "_models.OrderString"]] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + name: Optional[str] = None, + count: Optional[int] = None, + compute_type: Optional[Union[str, _models.EndpointComputeType]] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if name is not None: - _query_parameters['name'] = _SERIALIZER.query("name", name, 'str') + _params["name"] = _SERIALIZER.query("name", name, "str") if count is not None: - _query_parameters['count'] = _SERIALIZER.query("count", count, 'int') + _params["count"] = _SERIALIZER.query("count", count, "int") if compute_type is not None: - _query_parameters['computeType'] = _SERIALIZER.query("compute_type", compute_type, 'str') + _params["computeType"] = _SERIALIZER.query("compute_type", compute_type, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") 
if tags is not None: - _query_parameters['tags'] = _SERIALIZER.query("tags", tags, 'str') + _params["tags"] = _SERIALIZER.query("tags", tags, "str") if properties is not None: - _query_parameters['properties'] = _SERIALIZER.query("properties", properties, 'str') + _params["properties"] = _SERIALIZER.query("properties", properties, "str") if order_by is not None: - _query_parameters['orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + _params["orderBy"] = _SERIALIZER.query("order_by", order_by, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = 
kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PATCH", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url( + "endpoint_name", endpoint_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_list_keys_request( - subscription_id, # 
type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_regenerate_keys_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_regenerate_keys_request( + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_get_token_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, endpoint_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "endpointName": _SERIALIZER.url("endpoint_name", endpoint_name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class OnlineEndpointsOperations(object): - """OnlineEndpointsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class OnlineEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`online_endpoints` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - name=None, # type: Optional[str] - count=None, # type: Optional[int] - compute_type=None, # type: Optional[Union[str, "_models.EndpointComputeType"]] - skip=None, # type: Optional[str] - tags=None, # type: Optional[str] - properties=None, # type: Optional[str] - order_by=None, # type: Optional[Union[str, "_models.OrderString"]] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.OnlineEndpointTrackedResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + name: Optional[str] = None, + count: Optional[int] = None, + compute_type: Optional[Union[str, _models.EndpointComputeType]] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + order_by: Optional[Union[str, _models.OrderString]] = None, + **kwargs: Any + ) -> Iterable["_models.OnlineEndpoint"]: """List Online Endpoints. List Online Endpoints. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Name of the endpoint. + :param name: Name of the endpoint. Default value is None. :type name: str - :param count: Number of endpoints to be retrieved in a page of results. + :param count: Number of endpoints to be retrieved in a page of results. Default value is None. :type count: int - :param compute_type: EndpointComputeType to be filtered by. + :param compute_type: EndpointComputeType to be filtered by. Known values are: "Managed", + "Kubernetes", and "AzureMLCompute". Default value is None. :type compute_type: str or ~azure.mgmt.machinelearningservices.models.EndpointComputeType - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: A set of tags with which to filter the returned models. It is a comma separated - string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 . + string of tags key or tags key=value. Example: tagKey1,tagKey2,tagKey3=value3 . Default value + is None. :type tags: str :param properties: A set of properties with which to filter the returned models. It is a comma separated string of properties key and/or properties key=value Example: - propKey1,propKey2,propKey3=value3 . + propKey1,propKey2,propKey3=value3 . Default value is None. :type properties: str - :param order_by: The option to order the response. + :param order_by: The option to order the response. 
Known values are: "CreatedAtDesc", + "CreatedAtAsc", "UpdatedAtDesc", and "UpdatedAtAsc". Default value is None. :type order_by: str or ~azure.mgmt.machinelearningservices.models.OrderString :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either OnlineEndpointTrackedResourceArmPaginatedResult or - the result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.OnlineEndpointTrackedResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either OnlineEndpoint or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OnlineEndpointTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineEndpointTrackedResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, name=name, count=count, compute_type=compute_type, @@ -464,26 +457,26 @@ def prepare_request(next_link=None): tags=tags, properties=properties, order_by=order_by, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - name=name, - count=count, - compute_type=compute_type, - skip=skip, - tags=tags, - properties=properties, - order_by=order_by, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -494,16 +487,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("OnlineEndpointTrackedResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore 
return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -514,82 +506,83 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', 
response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> LROPoller[None]: """Delete Online Endpoint (asynchronous). Delete Online Endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -601,95 +594,106 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
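A short, hedged sketch of driving this long-running delete through the `online_endpoints` attribute described in the class docstring above (resource names are placeholders):

    # begin_delete returns an LROPoller[None]; result() blocks until the
    # service-side deletion completes.
    poller = client.online_endpoints.begin_delete(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        endpoint_name="my-endpoint",
    )
    token = poller.continuation_token()  # optional: persist and pass back later
                                         # via the continuation_token keyword
    poller.result()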
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.OnlineEndpoint" + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.OnlineEndpoint: """Get Online Endpoint. Get Online Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: OnlineEndpoint, or the result of cls(response) + :return: OnlineEndpoint or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineEndpoint"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OnlineEndpoint] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -697,102 +701,209 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}" + } def _update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - body, # type: "_models.PartialMinimalTrackedResourceWithIdentity" - **kwargs # type: Any - ): - # type: (...) 
-> Optional["_models.OnlineEndpoint"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.OnlineEndpoint"]] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], + **kwargs: Any + ) -> Optional[_models.OnlineEndpoint]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PartialMinimalTrackedResourceWithIdentity') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.OnlineEndpoint]] = kwargs.pop("cls", None) - request = build_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithIdentity") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._update_initial.metadata['url'], + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + 
response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}" + } + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.PartialMinimalTrackedResourceWithIdentity, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineEndpoint]: + """Update Online Endpoint (asynchronous). + + Update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineEndpoint]: + """Update Online Endpoint (asynchronous). + Update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_update( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - body, # type: "_models.PartialMinimalTrackedResourceWithIdentity" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.OnlineEndpoint"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.PartialMinimalTrackedResourceWithIdentity, IO], + **kwargs: Any + ) -> LROPoller[_models.OnlineEndpoint]: """Update Online Endpoint (asynchronous). Update Online Endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str - :param body: Online Endpoint entity to apply during operation. + :param body: Online Endpoint entity to apply during operation. Is either a + PartialMinimalTrackedResourceWithIdentity type or a IO type. Required. :type body: - ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithIdentity or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -805,17 +916,17 @@ def begin_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineEndpoint"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OnlineEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._update_initial( resource_group_name=resource_group_name, @@ -824,117 +935,230 @@ def begin_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - body, # type: "_models.OnlineEndpoint" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.OnlineEndpoint" - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineEndpoint"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.OnlineEndpoint, IO], + **kwargs: Any + ) -> _models.OnlineEndpoint: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'OnlineEndpoint') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OnlineEndpoint] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "OnlineEndpoint") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", 
response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.OnlineEndpoint, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineEndpoint]: + """Create or update Online Endpoint (asynchronous). + + Create or update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.OnlineEndpoint]: + """Create or update Online Endpoint (asynchronous). + Create or update Online Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: Online Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either OnlineEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - body, # type: "_models.OnlineEndpoint" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.OnlineEndpoint"] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.OnlineEndpoint, IO], + **kwargs: Any + ) -> LROPoller[_models.OnlineEndpoint]: """Create or update Online Endpoint (asynchronous). Create or update Online Endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str - :param body: Online Endpoint entity to apply during operation. - :type body: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint + :param body: Online Endpoint entity to apply during operation. Is either a OnlineEndpoint type + or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.OnlineEndpoint or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -947,17 +1171,17 @@ def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.OnlineEndpoint] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.OnlineEndpoint"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.OnlineEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -966,82 +1190,92 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('OnlineEndpoint', pipeline_response) + deserialized = self._deserialize("OnlineEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}" + } @distributed_trace def list_keys( - self, - resource_group_name, # type: str - workspace_name, # type: str - 
endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.EndpointAuthKeys" + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.EndpointAuthKeys: """List EndpointAuthKeys for an Endpoint using Key-based authentication. List EndpointAuthKeys for an Endpoint using Key-based authentication. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EndpointAuthKeys, or the result of cls(response) + :return: EndpointAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EndpointAuthKeys"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EndpointAuthKeys] = kwargs.pop("cls", None) - request = build_list_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_keys.metadata['url'], + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1049,94 +1283,194 @@ def list_keys( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EndpointAuthKeys', pipeline_response) + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys"} # type: ignore - + list_keys.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/listKeys" + } def _regenerate_keys_initial( # pylint: disable=inconsistent-return-statements self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - body, # type: "_models.RegenerateEndpointKeysRequest" - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.RegenerateEndpointKeysRequest, IO], + **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'RegenerateEndpointKeysRequest') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_regenerate_keys_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "RegenerateEndpointKeysRequest") + + request = build_regenerate_keys_request( resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._regenerate_keys_initial.metadata['url'], + content=_content, + template_url=self._regenerate_keys_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = 
self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _regenerate_keys_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys"} # type: ignore + _regenerate_keys_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys" + } + @overload + def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: _models.RegenerateEndpointKeysRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: RegenerateKeys request . Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param endpoint_name: Online Endpoint name. Required. + :type endpoint_name: str + :param body: RegenerateKeys request . Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace - def begin_regenerate_keys( # pylint: disable=inconsistent-return-statements + def begin_regenerate_keys( self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - body, # type: "_models.RegenerateEndpointKeysRequest" - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + resource_group_name: str, + workspace_name: str, + endpoint_name: str, + body: Union[_models.RegenerateEndpointKeysRequest, IO], + **kwargs: Any + ) -> LROPoller[None]: """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str - :param body: RegenerateKeys request . - :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest + :param body: RegenerateKeys request . Is either a RegenerateEndpointKeysRequest type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -1147,98 +1481,109 @@ def begin_regenerate_keys( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
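# ---------------------------------------------------------------------------
# Editor's note: an illustrative sketch for the key-management operations
# above (begin_regenerate_keys and list_keys); it is not part of the patch.
# `client` comes from the earlier sketches, and the RegenerateEndpointKeysRequest
# field name and the "Primary" value are assumptions for illustration.
# ---------------------------------------------------------------------------
client.online_endpoints.begin_regenerate_keys(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    endpoint_name="<endpoint>",
    body=models.RegenerateEndpointKeysRequest(key_type="Primary"),  # field name assumed
).result()  # LROPoller[None]; result() returns None once regeneration completes

keys = client.online_endpoints.list_keys(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    endpoint_name="<endpoint>",
)  # EndpointAuthKeys carrying the endpoint's key credentials
# ---------------------------------------------------------------------------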
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._regenerate_keys_initial( + raw_result = self._regenerate_keys_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_regenerate_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys"} # type: ignore + begin_regenerate_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/regenerateKeys" + } @distributed_trace def get_token( - self, - resource_group_name, # type: str - workspace_name, # type: str - endpoint_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> "_models.EndpointAuthToken" + self, resource_group_name: str, workspace_name: str, endpoint_name: str, **kwargs: Any + ) -> _models.EndpointAuthToken: """Retrieve a valid AML token for an Endpoint using AMLToken-based authentication. Retrieve a valid AML token for an Endpoint using AMLToken-based authentication. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param endpoint_name: Online Endpoint name. + :param endpoint_name: Online Endpoint name. Required. :type endpoint_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EndpointAuthToken, or the result of cls(response) + :return: EndpointAuthToken or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthToken - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EndpointAuthToken"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EndpointAuthToken] = kwargs.pop("cls", None) - request = build_get_token_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, endpoint_name=endpoint_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get_token.metadata['url'], + template_url=self.get_token.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1246,12 +1591,13 @@ def get_token( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EndpointAuthToken', pipeline_response) + deserialized = self._deserialize("EndpointAuthToken", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get_token.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token"} # type: ignore - + get_token.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/onlineEndpoints/{endpointName}/token" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_operations.py index fe9f95976bcd..8e712810f3f2 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_operations.py @@ -6,119 +6,123 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models +from .._serialization import Serializer from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off -def build_list_request( - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - accept = "application/json" +def build_list_request(**kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL _url = kwargs.pop("template_url", "/providers/Microsoft.MachineLearningServices/operations") # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class Operations(object): - """Operations operations. 
- - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class Operations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`operations` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list( - self, - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.AmlOperationListResult"] + def list(self, **kwargs: Any) -> Iterable["_models.Operation"]: """Lists all of the available Azure Machine Learning Workspaces REST API operations. Lists all of the available Azure Machine Learning Workspaces REST API operations. 
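# ---------------------------------------------------------------------------
# Editor's note: an illustrative sketch for the regenerated Operations.list
# shown here; it is not part of the patch. `client` comes from the earlier
# sketches. Per this hunk, list() now yields Operation items deserialized from
# OperationListResult pages through an ItemPaged iterator, so continuation
# links are followed automatically while iterating.
# ---------------------------------------------------------------------------
for op in client.operations.list():
    print(op.name)  # `name` is the conventional ARM Operation field; assumed here
# ---------------------------------------------------------------------------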
:keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either AmlOperationListResult or the result of - cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.AmlOperationListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Operation or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Operation] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.OperationListResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.AmlOperationListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -126,19 +130,18 @@ def prepare_request(next_link=None): return request def extract_data(pipeline_response): - deserialized = self._deserialize("AmlOperationListResult", pipeline_response) + deserialized = self._deserialize("OperationListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -149,8 +152,6 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/providers/Microsoft.MachineLearningServices/operations"} # type: ignore + list.metadata = {"url": "/providers/Microsoft.MachineLearningServices/operations"} diff --git 
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_patch.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_patch.py new file mode 100644 index 000000000000..f7dd32510333 --- /dev/null +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_patch.py @@ -0,0 +1,20 @@ +# ------------------------------------ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT License. +# ------------------------------------ +"""Customize generated code here. + +Follow our quickstart for examples: https://aka.ms/azsdk/python/dpcodegen/python/customize +""" +from typing import List + +__all__: List[str] = [] # Add all objects you want publicly available to users at this package level + + +def patch_sdk(): + """Do not remove from this file. + + `patch_sdk` is a last resort escape hatch that allows you to do customizations + you can't accomplish using the techniques described in + https://aka.ms/azsdk/python/dpcodegen/python/customize + """ diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_private_endpoint_connections_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_private_endpoint_connections_operations.py index 1adb12b673c0..e6234b259010 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_private_endpoint_connections_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_private_endpoint_connections_operations.py @@ -6,261 +6,281 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_update_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "privateEndpointConnectionName": _SERIALIZER.url("private_endpoint_connection_name", private_endpoint_connection_name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "privateEndpointConnectionName": _SERIALIZER.url( + "private_endpoint_connection_name", private_endpoint_connection_name, "str" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class PrivateEndpointConnectionsOperations(object): - """PrivateEndpointConnectionsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. 
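Each build_*_request helper above reduces to the same recipe: format the ARM path, merge caller-supplied params and headers case-insensitively, pin the api-version query parameter, and return an azure.core.rest.HttpRequest. A simplified stand-in is sketched below; it borrows the privateEndpointConnections path from the diff but skips the SDK's Serializer-based validation (min_length, the workspaceName regex), so treat it as illustrative rather than equivalent.

# Simplified sketch of what a build_list_request helper produces.
# Validation performed by the SDK's internal Serializer is intentionally
# omitted here; this is an illustration of the request shape only.
from typing import Any
from urllib.parse import quote

from azure.core.rest import HttpRequest
from azure.core.utils import case_insensitive_dict


def build_list_request_sketch(
    resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
    headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
    params = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version: str = kwargs.pop("api_version", params.pop("api-version", "2023-08-01-preview"))
    accept = headers.pop("Accept", "application/json")

    # Format the ARM path (path parameters are URL-quoted, not validated).
    url = (
        "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}"
        "/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"
        "/privateEndpointConnections"
    ).format(
        subscriptionId=quote(subscription_id, safe=""),
        resourceGroupName=quote(resource_group_name, safe=""),
        workspaceName=quote(workspace_name, safe=""),
    )

    # Pin the api-version and the Accept header, then hand back an HttpRequest.
    params["api-version"] = api_version
    headers["Accept"] = accept

    return HttpRequest(method="GET", url=url, params=params, headers=headers, **kwargs)


if __name__ == "__main__":
    request = build_list_request_sketch("my-rg", "my-ws", "00000000-0000-0000-0000-000000000000")
    print(request.method, request.url)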
- :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class PrivateEndpointConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`private_endpoint_connections` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.PrivateEndpointConnectionListResult"] + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> Iterable["_models.PrivateEndpointConnection"]: """Called by end-users to get all PE connections. Called by end-users to get all PE connections. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PrivateEndpointConnectionListResult or the result - of cls(response) + :return: An iterator like instance of either PrivateEndpointConnection or the result of + cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnectionListResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PrivateEndpointConnectionListResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnectionListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -271,16 +291,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("PrivateEndpointConnectionListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -291,61 +310,64 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, 
extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> None: """Called by end-users to delete a PE connection. Called by end-users to delete a PE connection. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -356,58 +378,62 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.PrivateEndpointConnection" + self, resource_group_name: str, workspace_name: str, private_endpoint_connection_name: str, **kwargs: Any + ) -> _models.PrivateEndpointConnection: """Called by end-users to get a PE connection. Called by end-users to get a PE connection. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. :type private_endpoint_connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateEndpointConnection, or the result of cls(response) + :return: PrivateEndpointConnection or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -415,26 +441,96 @@ def get( error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" + } + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + body: _models.PrivateEndpointConnection, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. + + Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. + :type private_endpoint_connection_name: str + :param body: PrivateEndpointConnection object. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_update( + self, + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PrivateEndpointConnection: + """Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. + + Called by end-users to approve or reject a PE connection. + This method must validate and forward the call to NRP. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. + :type private_endpoint_connection_name: str + :param body: PrivateEndpointConnection object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PrivateEndpointConnection or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - private_endpoint_connection_name, # type: str - body, # type: "_models.PrivateEndpointConnection" - **kwargs # type: Any - ): - # type: (...) -> "_models.PrivateEndpointConnection" + resource_group_name: str, + workspace_name: str, + private_endpoint_connection_name: str, + body: Union[_models.PrivateEndpointConnection, IO], + **kwargs: Any + ) -> _models.PrivateEndpointConnection: """Called by end-users to approve or reject a PE connection. This method must validate and forward the call to NRP. @@ -442,47 +538,67 @@ def create_or_update( This method must validate and forward the call to NRP. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. + :param private_endpoint_connection_name: NRP Private Endpoint Connection Name. Required. :type private_endpoint_connection_name: str - :param body: PrivateEndpointConnection object. - :type body: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection + :param body: PrivateEndpointConnection object. Is either a PrivateEndpointConnection type or a + IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PrivateEndpointConnection, or the result of cls(response) + :return: PrivateEndpointConnection or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PrivateEndpointConnection - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateEndpointConnection"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PrivateEndpointConnection') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PrivateEndpointConnection") request = build_create_or_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, private_endpoint_connection_name=private_endpoint_connection_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create_or_update.metadata['url'], + content=_content, + template_url=self.create_or_update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -490,12 +606,13 @@ def create_or_update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PrivateEndpointConnection', pipeline_response) + deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}"} # type: ignore - + create_or_update.metadata = { + "url": 
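The regenerated create_or_update now pairs typed @overload declarations (model body versus IO body) with a single runtime implementation that branches on isinstance(body, (IOBase, bytes)) to choose between a serialized JSON payload and a raw binary payload. The sketch below reproduces that dispatch shape; json.dumps stands in for the SDK serializer and a plain dict stands in for the PrivateEndpointConnection model, both of which are assumptions made for illustration.

# Sketch of the model-vs-IO dispatch used by the regenerated create_or_update.
# json.dumps stands in for self._serialize.body(...) and Dict[str, Any] stands
# in for the PrivateEndpointConnection model; neither is the real SDK type.
import io
import json
from io import IOBase
from typing import IO, Any, Dict, Optional, Union, overload


@overload
def create_or_update_sketch(body: Dict[str, Any], *, content_type: str = "application/json") -> str: ...


@overload
def create_or_update_sketch(body: IO, *, content_type: str = "application/json") -> str: ...


def create_or_update_sketch(
    body: Union[Dict[str, Any], IO], *, content_type: Optional[str] = None
) -> str:
    content_type = content_type or "application/json"
    _json = None
    _content = None
    if isinstance(body, (IOBase, bytes)):
        # Raw stream/bytes input: forwarded as-is in the request body.
        _content = body.read() if isinstance(body, IOBase) else body
    else:
        # Model-like input: serialized to JSON (the SDK uses its own serializer).
        _json = json.dumps(body)
    payload = _content if _content is not None else _json
    return f"PUT {content_type}: {payload!r}"


if __name__ == "__main__":
    print(create_or_update_sketch({"properties": {"privateLinkServiceConnectionState": {"status": "Approved"}}}))
    print(create_or_update_sketch(io.BytesIO(b'{"properties": {}}')))

The overloads give static type checkers the two supported call shapes while the single implementation keeps one request-building code path, which is the design choice the generated operations follow for every body parameter accepting either a model or an IO stream.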
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateEndpointConnections/{privateEndpointConnectionName}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_private_link_resources_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_private_link_resources_operations.py index 313f0105275d..34bb156137f4 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_private_link_resources_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_private_link_resources_operations.py @@ -6,98 +6,94 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class PrivateLinkResourcesOperations(object): - """PrivateLinkResourcesOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class PrivateLinkResourcesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`private_link_resources` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.PrivateLinkResourceListResult"] + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> Iterable["_models.PrivateLinkResource"]: """Called by Client (Portal, CLI, etc) to get available "private link resources" for the workspace. Each "private link resource" is a connection endpoint (IP address) to the resource. @@ -115,44 +111,57 @@ def list( Defined in the "[NRP] Private Endpoint Design" doc, topic "GET API for GroupIds". :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either PrivateLinkResourceListResult or the result of - cls(response) + :return: An iterator like instance of either PrivateLinkResource or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PrivateLinkResourceListResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.PrivateLinkResource] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.PrivateLinkResourceListResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.PrivateLinkResourceListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - 
workspace_name=workspace_name, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -163,16 +172,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("PrivateLinkResourceListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -183,8 +191,8 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/privateLinkResources" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_quotas_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_quotas_operations.py index 165138bc0044..c1c056a0b497 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_quotas_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_quotas_operations.py @@ -6,155 +6,199 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. 
import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off - -def build_update_request( - location, # type: str - subscription_id, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + + +def build_update_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas", + ) # pylint: disable=line-too-long path_format_arguments = { - "location": _SERIALIZER.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "location": _SERIALIZER.url("location", location, "str", pattern=r"^[-\w\._]+$"), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_list_request( - subscription_id, # type: str - location, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "location": _SERIALIZER.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "location": _SERIALIZER.url("location", location, "str", pattern=r"^[-\w\._]+$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class QuotasOperations(object): - """QuotasOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class QuotasOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`quotas` attribute. 
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") - @distributed_trace + @overload def update( self, - location, # type: str - parameters, # type: "_models.QuotaUpdateParameters" - **kwargs # type: Any - ): - # type: (...) -> "_models.UpdateWorkspaceQuotasResult" + location: str, + parameters: _models.QuotaUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.UpdateWorkspaceQuotasResult: """Update quota for each VM family in workspace. - :param location: The location for update quota is queried. + :param location: The location for update quota is queried. Required. :type location: str - :param parameters: Quota update parameters. + :param parameters: Quota update parameters. Required. :type parameters: ~azure.mgmt.machinelearningservices.models.QuotaUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: UpdateWorkspaceQuotasResult, or the result of cls(response) + :return: UpdateWorkspaceQuotasResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, location: str, parameters: IO, *, content_type: str = "application/json", **kwargs: Any + ) -> _models.UpdateWorkspaceQuotasResult: + """Update quota for each VM family in workspace. + + :param location: The location for update quota is queried. Required. + :type location: str + :param parameters: Quota update parameters. Required. + :type parameters: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: UpdateWorkspaceQuotasResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def update( + self, location: str, parameters: Union[_models.QuotaUpdateParameters, IO], **kwargs: Any + ) -> _models.UpdateWorkspaceQuotasResult: + """Update quota for each VM family in workspace. + + :param location: The location for update quota is queried. Required. + :type location: str + :param parameters: Quota update parameters. Is either a QuotaUpdateParameters type or a IO + type. Required. + :type parameters: ~azure.mgmt.machinelearningservices.models.QuotaUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: UpdateWorkspaceQuotasResult or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.UpdateWorkspaceQuotasResult + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.UpdateWorkspaceQuotasResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.UpdateWorkspaceQuotasResult] = kwargs.pop("cls", None) - _json = self._serialize.body(parameters, 'QuotaUpdateParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(parameters, (IOBase, bytes)): + _content = parameters + else: + _json = self._serialize.body(parameters, "QuotaUpdateParameters") request = build_update_request( location=location, @@ -162,16 +206,19 @@ def update( api_version=api_version, content_type=content_type, json=_json, - template_url=self.update.metadata['url'], + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -179,59 +226,68 @@ def update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('UpdateWorkspaceQuotasResult', pipeline_response) + deserialized = self._deserialize("UpdateWorkspaceQuotasResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas"} # type: ignore - + update.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/updateQuotas" + } @distributed_trace - def list( - self, - location, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ListWorkspaceQuotas"] + def list(self, location: str, **kwargs: Any) -> Iterable["_models.ResourceQuota"]: """Gets the currently assigned Workspace Quotas based on VMFamily. - :param location: The location for which resource usage is queried. 
+ :param location: The location for which resource usage is queried. Required. :type location: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ListWorkspaceQuotas or the result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ListWorkspaceQuotas] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either ResourceQuota or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ResourceQuota] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ListWorkspaceQuotas] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListWorkspaceQuotas"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, location=location, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - location=location, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -242,16 +298,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ListWorkspaceQuotas", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -262,8 +317,8 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': 
"/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/quotas" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registries_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registries_operations.py index caeb557593fe..58883cdd370c 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registries_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registries_operations.py @@ -6,358 +6,346 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off -def build_list_by_subscription_request( - subscription_id, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - accept = "application/json" +def build_list_by_subscription_request(subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/registries") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/registries" + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_list_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_get_request(resource_group_name: str, registry_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_update_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, registry_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PATCH", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_remove_regions_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_remove_regions_request( + resource_group_name: str, registry_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/removeRegions") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/removeRegions", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class RegistriesOperations(object): - """RegistriesOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
+ _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistriesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registries` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list_by_subscription( - self, - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.RegistryTrackedResourceArmPaginatedResult"] + def list_by_subscription(self, **kwargs: Any) -> Iterable["_models.Registry"]: """List registries by subscription. List registries by subscription. :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either RegistryTrackedResourceArmPaginatedResult or the - result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.RegistryTrackedResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Registry or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.RegistryTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.RegistryTrackedResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_by_subscription_request( subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_by_subscription.metadata['url'], + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_by_subscription_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + 
_next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -368,16 +356,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("RegistryTrackedResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -388,58 +375,66 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list_by_subscription.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/registries"} # type: ignore + list_by_subscription.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/registries" + } @distributed_trace - def list( - self, - resource_group_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.RegistryTrackedResourceArmPaginatedResult"] + def list(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.Registry"]: """List registries. List registries. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either RegistryTrackedResourceArmPaginatedResult or the - result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.RegistryTrackedResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Registry or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.RegistryTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.RegistryTrackedResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -450,16 +445,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("RegistryTrackedResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -470,77 +464,79 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return 
ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, registry_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore - + 
_delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete(self, resource_group_name: str, registry_name: str, **kwargs: Any) -> LROPoller[None]: """Delete registry. Delete registry. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -552,90 +548,101 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, 
polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}" + } @distributed_trace - def get( - self, - resource_group_name, # type: str - registry_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.Registry" + def get(self, resource_group_name: str, registry_name: str, **kwargs: Any) -> _models.Registry: """Get registry. Get registry. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Registry, or the result of cls(response) + :return: Registry or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Registry - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Registry"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Registry] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -643,68 +650,152 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Registry', pipeline_response) + deserialized = self._deserialize("Registry", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + get.metadata 
= { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}" + } + + @overload + def update( + self, + resource_group_name: str, + registry_name: str, + body: _models.PartialRegistryPartialTrackedResource, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Registry: + """Update tags. + + Update tags. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialRegistryPartialTrackedResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Registry or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Registry + :raises ~azure.core.exceptions.HttpResponseError: + """ + @overload + def update( + self, + resource_group_name: str, + registry_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.Registry: + """Update tags. + + Update tags. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: Registry or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.Registry + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def update( self, - resource_group_name, # type: str - registry_name, # type: str - body, # type: "_models.PartialRegistryPartialTrackedResource" - **kwargs # type: Any - ): - # type: (...) -> "_models.Registry" + resource_group_name: str, + registry_name: str, + body: Union[_models.PartialRegistryPartialTrackedResource, IO], + **kwargs: Any + ) -> _models.Registry: """Update tags. Update tags. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param body: Details required to create the registry. - :type body: ~azure.mgmt.machinelearningservices.models.PartialRegistryPartialTrackedResource + :param body: Details required to create the registry. Is either a + PartialRegistryPartialTrackedResource type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PartialRegistryPartialTrackedResource or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Registry, or the result of cls(response) + :return: Registry or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Registry - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Registry"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Registry] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'PartialRegistryPartialTrackedResource') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialRegistryPartialTrackedResource") request = build_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update.metadata['url'], + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -712,91 +803,110 @@ def update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Registry', pipeline_response) + deserialized = self._deserialize("Registry", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore - + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}" + } def _create_or_update_initial( - self, - resource_group_name, # type: str - registry_name, # type: str - body, # type: "_models.Registry" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.Registry" - cls = kwargs.pop('cls', None) # type: ClsType["_models.Registry"] + self, resource_group_name: str, registry_name: str, body: Union[_models.Registry, IO], **kwargs: Any + ) -> _models.Registry: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'Registry') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Registry] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Registry") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) if response.status_code == 200: - deserialized = self._deserialize('Registry', pipeline_response) + deserialized = self._deserialize("Registry", pipeline_response) if response.status_code == 201: - deserialized = self._deserialize('Registry', pipeline_response) + deserialized = self._deserialize("Registry", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) - - return deserialized + return cls(pipeline_response, deserialized, {}) # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + return deserialized # type: ignore + _create_or_update_initial.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}" + } - @distributed_trace + @overload def begin_create_or_update( self, - resource_group_name, # type: str - registry_name, # type: str - body, # type: "_models.Registry" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.Registry"] + resource_group_name: str, + registry_name: str, + body: _models.Registry, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Registry]: """Create or update registry. Create or update registry. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param body: Details required to create the registry. + :param body: Details required to create the registry. Required. :type body: ~azure.mgmt.machinelearningservices.models.Registry + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -807,17 +917,88 @@ def begin_create_or_update( Retry-After header is present. :return: An instance of LROPoller that returns either Registry or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Registry] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Registry"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Registry]: + """Create or update registry. + + Create or update registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Registry or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, resource_group_name: str, registry_name: str, body: Union[_models.Registry, IO], **kwargs: Any + ) -> LROPoller[_models.Registry]: + """Create or update registry. + + Create or update registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param body: Details required to create the registry. Is either a Registry type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.Registry or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either Registry or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Registry] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -825,113 +1006,139 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('Registry', pipeline_response) + deserialized = self._deserialize("Registry", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}" + } def _remove_regions_initial( - self, - resource_group_name, # type: str - registry_name, # type: str - body, # type: "_models.Registry" - **kwargs # type: Any - ): - # type: (...) 
-> Optional["_models.Registry"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Registry"]] + self, resource_group_name: str, registry_name: str, body: Union[_models.Registry, IO], **kwargs: Any + ) -> Optional[_models.Registry]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'Registry') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.Registry]] = kwargs.pop("cls", None) - request = build_remove_regions_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Registry") + + request = build_remove_regions_request( resource_group_name=resource_group_name, registry_name=registry_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._remove_regions_initial.metadata['url'], + content=_content, + template_url=self._remove_regions_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('Registry', pipeline_response) + deserialized = self._deserialize("Registry", pipeline_response) if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", 
response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _remove_regions_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/removeRegions"} # type: ignore - + _remove_regions_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/removeRegions" + } - @distributed_trace + @overload def begin_remove_regions( self, - resource_group_name, # type: str - registry_name, # type: str - body, # type: "_models.Registry" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.Registry"] + resource_group_name: str, + registry_name: str, + body: _models.Registry, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Registry]: """Remove regions from registry. Remove regions from registry. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param body: Details required to create the registry. + :param body: Details required to create the registry. Required. :type body: ~azure.mgmt.machinelearningservices.models.Registry + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -942,17 +1149,88 @@ def begin_remove_regions( Retry-After header is present. :return: An instance of LROPoller that returns either Registry or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Registry] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Registry"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + def begin_remove_regions( + self, + resource_group_name: str, + registry_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Registry]: + """Remove regions from registry. + + Remove regions from registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param body: Details required to create the registry. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Registry or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_remove_regions( + self, resource_group_name: str, registry_name: str, body: Union[_models.Registry, IO], **kwargs: Any + ) -> LROPoller[_models.Registry]: + """Remove regions from registry. + + Remove regions from registry. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param body: Details required to create the registry. Is either a Registry type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.Registry or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either Registry or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Registry] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Registry] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._remove_regions_initial( resource_group_name=resource_group_name, @@ -960,29 +1238,36 @@ def begin_remove_regions( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('Registry', pipeline_response) + deserialized = self._deserialize("Registry", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_remove_regions.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/removeRegions"} # type: ignore + begin_remove_regions.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/removeRegions" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_code_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_code_containers_operations.py index 12992fe410ee..cca1bdb150ab 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_code_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_code_containers_operations.py @@ -6,271 +6,269 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
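Illustrative usage sketch (not part of the generated code): the registries hunks above move the long-running operations onto annotated Union[_models.Registry, IO] signatures, so a caller can hand begin_create_or_update or begin_remove_regions either a deserialized model or a pre-serialized payload. The sketch below shows that calling pattern under stated assumptions only: the exported client class and its `registries` operations-group attribute are assumed to follow the usual AutoRest layout, and every credential, subscription, and resource name is a placeholder rather than a value taken from this diff.

# Minimal sketch, assuming the regenerated client is importable from this package
# and exposes a `registries` operations group (placeholder IDs and names throughout).
from azure.identity import DefaultAzureCredential

from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
)

# Model overload: the operation serializes the Registry body to JSON before sending.
registry_body = _models.Registry(location="eastus")  # minimal body, for illustration only
poller = client.registries.begin_create_or_update(
    resource_group_name="my-rg",        # placeholder
    registry_name="my-registry",        # placeholder
    body=registry_body,
)
registry = poller.result()  # LROPoller blocks here until the ARM operation completes

# IO overload: bytes (or a file-like object) are passed through as raw request content.
poller = client.registries.begin_remove_regions(
    resource_group_name="my-rg",
    registry_name="my-registry",
    body=b'{"location": "eastus"}',
    content_type="application/json",
)
registry = poller.result()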
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, registry_name: str, subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, code_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "codeName": _SERIALIZER.url("code_name", code_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "codeName": _SERIALIZER.url("code_name", code_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, registry_name: str, code_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "codeName": _SERIALIZER.url("code_name", code_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "codeName": _SERIALIZER.url("code_name", code_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, code_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "codeName": _SERIALIZER.url("code_name", code_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "codeName": _SERIALIZER.url("code_name", code_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class RegistryCodeContainersOperations(object): - """RegistryCodeContainersOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. 
- :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryCodeContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_code_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name, # type: str - registry_name, # type: str - skip=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.CodeContainerResourceArmPaginatedResult"] + self, resource_group_name: str, registry_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.CodeContainer"]: """List containers. List containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. 
:type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either CodeContainerResourceArmPaginatedResult or the - result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either CodeContainer or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - api_version=api_version, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -281,16 +279,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("CodeContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -301,82 +298,84 @@ def 
get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, registry_name: str, code_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - 
_delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, registry_name: str, code_name: str, **kwargs: Any + ) -> LROPoller[None]: """Delete Code container. Delete Code container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Container name. + :param code_name: Container name. Required. :type code_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -388,95 +387,105 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is 
False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}" + } @distributed_trace - def get( - self, - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.CodeContainer" + def get(self, resource_group_name: str, registry_name: str, code_name: str, **kwargs: Any) -> _models.CodeContainer: """Get Code container. Get Code container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Container name. + :param code_name: Container name. Required. :type code_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: CodeContainer, or the result of cls(response) + :return: CodeContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.CodeContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = 
pipeline_response.http_response if response.status_code not in [200]: @@ -484,100 +493,210 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('CodeContainer', pipeline_response) + deserialized = self._deserialize("CodeContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - body, # type: "_models.CodeContainer" - **kwargs # type: Any - ): - # type: (...) -> "_models.CodeContainer" - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainer"] + resource_group_name: str, + registry_name: str, + code_name: str, + body: Union[_models.CodeContainer, IO], + **kwargs: Any + ) -> _models.CodeContainer: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'CodeContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CodeContainer] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeContainer") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, 
error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('CodeContainer', pipeline_response) + deserialized = self._deserialize("CodeContainer", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('CodeContainer', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("CodeContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + body: _models.CodeContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CodeContainer]: + """Create or update Code container. + + Create or update Code container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either CodeContainer or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CodeContainer]: + """Create or update Code container. - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # type: ignore + Create or update Code container. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CodeContainer or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.CodeContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - body, # type: "_models.CodeContainer" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.CodeContainer"] + resource_group_name: str, + registry_name: str, + code_name: str, + body: Union[_models.CodeContainer, IO], + **kwargs: Any + ) -> LROPoller[_models.CodeContainer]: """Create or update Code container. Create or update Code container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Container name. + :param code_name: Container name. Required. :type code_name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer + :param body: Container entity to create or update. Is either a CodeContainer type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -589,17 +708,17 @@ def begin_create_or_update( :return: An instance of LROPoller that returns either CodeContainer or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.CodeContainer] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeContainer"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CodeContainer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -608,29 +727,36 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('CodeContainer', pipeline_response) + deserialized = self._deserialize("CodeContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}"} # 
type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_code_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_code_versions_operations.py index a3bc47279f0f..bacc763013a7 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_code_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_code_versions_operations.py @@ -6,344 +6,341 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - order_by = kwargs.pop('order_by', None) # type: Optional[str] - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + registry_name: str, + code_name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "codeName": _SERIALIZER.url("code_name", code_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "codeName": _SERIALIZER.url("code_name", code_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if order_by is not None: - _query_parameters['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") if top is not None: - _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int') + _params["$top"] = _SERIALIZER.query("top", top, "int") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - version, 
# type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, code_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "codeName": _SERIALIZER.url("code_name", code_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "codeName": _SERIALIZER.url("code_name", code_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, registry_name: str, code_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "codeName": _SERIALIZER.url("code_name", code_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "codeName": _SERIALIZER.url("code_name", code_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, code_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "codeName": _SERIALIZER.url("code_name", code_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "codeName": _SERIALIZER.url("code_name", code_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def 
build_create_or_get_start_pending_upload_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, registry_name: str, code_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}/startPendingUpload") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}/startPendingUpload", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "codeName": _SERIALIZER.url("code_name", code_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "codeName": _SERIALIZER.url("code_name", code_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class RegistryCodeVersionsOperations(object): - """RegistryCodeVersionsOperations operations. - - You should not instantiate this class directly. 
Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryCodeVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_code_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - order_by=None, # type: Optional[str] - top=None, # type: Optional[int] - skip=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.CodeVersionResourceArmPaginatedResult"] + resource_group_name: str, + registry_name: str, + code_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.CodeVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Container name. + :param code_name: Container name. Required. :type code_name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. 
:type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either CodeVersionResourceArmPaginatedResult or the - result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either CodeVersion or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - code_name=code_name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -354,16 +351,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("CodeVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, 
**kwargs ) response = pipeline_response.http_response @@ -374,87 +370,87 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, registry_name: str, code_name: str, version: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", 
response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, registry_name: str, code_name: str, version: str, **kwargs: Any + ) -> LROPoller[None]: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Container name. + :param code_name: Container name. Required. :type code_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -466,100 +462,111 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, version=version, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.CodeVersion" + self, resource_group_name: str, registry_name: str, code_name: str, version: str, **kwargs: Any + ) -> _models.CodeVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Container name. + :param code_name: Container name. Required. :type code_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: CodeVersion, or the result of cls(response) + :return: CodeVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.CodeVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.CodeVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -567,105 +574,221 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('CodeVersion', pipeline_response) + deserialized = self._deserialize("CodeVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - version, # type: str - body, # type: "_models.CodeVersion" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.CodeVersion" - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersion"] + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: Union[_models.CodeVersion, IO], + **kwargs: Any + ) -> _models.CodeVersion: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'CodeVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CodeVersion] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "CodeVersion") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('CodeVersion', pipeline_response) + deserialized = self._deserialize("CodeVersion", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('CodeVersion', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + 
) + + deserialized = self._deserialize("CodeVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}" + } + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: _models.CodeVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CodeVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CodeVersion or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.CodeVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param code_name: Container name. Required. + :type code_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either CodeVersion or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.CodeVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - version, # type: str - body, # type: "_models.CodeVersion" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.CodeVersion"] + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: Union[_models.CodeVersion, IO], + **kwargs: Any + ) -> LROPoller[_models.CodeVersion]: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Container name. + :param code_name: Container name. Required. :type code_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion + :param body: Version entity to create or update. Is either a CodeVersion type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.CodeVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -677,17 +800,17 @@ def begin_create_or_update( :return: An instance of LROPoller that returns either CodeVersion or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.CodeVersion] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.CodeVersion"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.CodeVersion] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -697,93 +820,194 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('CodeVersion', pipeline_response) + deserialized = self._deserialize("CodeVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}" + } - @distributed_trace + @overload def 
create_or_get_start_pending_upload( self, - resource_group_name, # type: str - registry_name, # type: str - code_name, # type: str - version, # type: str - body, # type: "_models.PendingUploadRequestDto" - **kwargs # type: Any - ): - # type: (...) -> "_models.PendingUploadResponseDto" + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: _models.PendingUploadRequestDto, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponseDto: """Generate a storage location and credential for the client to upload a code asset to. Generate a storage location and credential for the client to upload a code asset to. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param code_name: Pending upload name. This is case-sensitive. + :param code_name: Pending upload name. This is case-sensitive. Required. :type code_name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Pending upload request object. + :param body: Pending upload request object. Required. :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_get_start_pending_upload( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a code asset to. + + Generate a storage location and credential for the client to upload a code asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param code_name: Pending upload name. This is case-sensitive. Required. + :type code_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PendingUploadResponseDto, or the result of cls(response) + :return: PendingUploadResponseDto or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_get_start_pending_upload( + self, + resource_group_name: str, + registry_name: str, + code_name: str, + version: str, + body: Union[_models.PendingUploadRequestDto, IO], + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a code asset to. + + Generate a storage location and credential for the client to upload a code asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param code_name: Pending upload name. This is case-sensitive. Required. + :type code_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Is either a PendingUploadRequestDto type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PendingUploadResponseDto"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PendingUploadResponseDto] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'PendingUploadRequestDto') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PendingUploadRequestDto") request = build_create_or_get_start_pending_upload_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, code_name=code_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, 
content_type=content_type, json=_json, - template_url=self.create_or_get_start_pending_upload.metadata['url'], + content=_content, + template_url=self.create_or_get_start_pending_upload.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -791,12 +1015,13 @@ def create_or_get_start_pending_upload( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PendingUploadResponseDto', pipeline_response) + deserialized = self._deserialize("PendingUploadResponseDto", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_get_start_pending_upload.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}/startPendingUpload"} # type: ignore - + create_or_get_start_pending_upload.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/codes/{codeName}/versions/{version}/startPendingUpload" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_component_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_component_containers_operations.py index e8227fd5287f..77094164bc2e 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_component_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_component_containers_operations.py @@ -6,271 +6,276 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, registry_name: str, subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - 
component_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, component_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "componentName": _SERIALIZER.url("component_name", component_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "componentName": _SERIALIZER.url( + "component_name", component_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, registry_name: str, component_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "componentName": _SERIALIZER.url("component_name", component_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "componentName": _SERIALIZER.url( + "component_name", component_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, component_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "componentName": _SERIALIZER.url("component_name", component_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "componentName": _SERIALIZER.url( + "component_name", component_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class RegistryComponentContainersOperations(object): - """RegistryComponentContainersOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. 
- :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryComponentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_component_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name, # type: str - registry_name, # type: str - skip=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ComponentContainerResourceArmPaginatedResult"] + self, resource_group_name: str, registry_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.ComponentContainer"]: """List containers. List containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. 
:type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ComponentContainerResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either ComponentContainer or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComponentContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - api_version=api_version, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -281,16 +286,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ComponentContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -301,82 
+305,84 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, registry_name: str, component_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return 
cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, registry_name: str, component_name: str, **kwargs: Any + ) -> LROPoller[None]: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param component_name: Container name. + :param component_name: Container name. Required. :type component_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -388,95 +394,107 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, 
ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ComponentContainer" + self, resource_group_name: str, registry_name: str, component_name: str, **kwargs: Any + ) -> _models.ComponentContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param component_name: Container name. + :param component_name: Container name. Required. :type component_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComponentContainer, or the result of cls(response) + :return: ComponentContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComponentContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + 
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -484,100 +502,212 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ComponentContainer', pipeline_response) + deserialized = self._deserialize("ComponentContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - body, # type: "_models.ComponentContainer" - **kwargs # type: Any - ): - # type: (...) -> "_models.ComponentContainer" - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentContainer"] + resource_group_name: str, + registry_name: str, + component_name: str, + body: Union[_models.ComponentContainer, IO], + **kwargs: Any + ) -> _models.ComponentContainer: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ComponentContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComponentContainer] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentContainer") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ComponentContainer', pipeline_response) + deserialized = self._deserialize("ComponentContainer", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('ComponentContainer', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ComponentContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}" + } + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + body: _models.ComponentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComponentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ComponentContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComponentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ComponentContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComponentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - body, # type: "_models.ComponentContainer" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.ComponentContainer"] + resource_group_name: str, + registry_name: str, + component_name: str, + body: Union[_models.ComponentContainer, IO], + **kwargs: Any + ) -> LROPoller[_models.ComponentContainer]: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param component_name: Container name. + :param component_name: Container name. Required. :type component_name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer + :param body: Container entity to create or update. Is either a ComponentContainer type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. 
+ Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -590,17 +720,17 @@ def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComponentContainer] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentContainer"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComponentContainer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -609,29 +739,36 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ComponentContainer', pipeline_response) + deserialized = self._deserialize("ComponentContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}"} # type: ignore + 
begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_component_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_component_versions_operations.py index b77839070f68..b67cc1c1ed88 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_component_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_component_versions_operations.py @@ -6,308 +6,317 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - order_by = kwargs.pop('order_by', None) # type: Optional[str] - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[str] - stage = kwargs.pop('stage', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + registry_name: str, + component_name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + stage: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "componentName": _SERIALIZER.url("component_name", component_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "componentName": _SERIALIZER.url( + "component_name", component_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if order_by is not None: - _query_parameters['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") if top is not None: - _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int') + _params["$top"] = _SERIALIZER.query("top", top, "int") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if stage is not None: - _query_parameters['stage'] = _SERIALIZER.query("stage", stage, 'str') + _params["stage"] = _SERIALIZER.query("stage", stage, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", 
accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, component_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "componentName": _SERIALIZER.url("component_name", component_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "componentName": _SERIALIZER.url( + "component_name", component_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, 
**kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, registry_name: str, component_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "componentName": _SERIALIZER.url("component_name", component_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "componentName": _SERIALIZER.url( + "component_name", component_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, component_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "componentName": _SERIALIZER.url("component_name", component_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "componentName": _SERIALIZER.url( + "component_name", component_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class RegistryComponentVersionsOperations(object): - """RegistryComponentVersionsOperations operations. - - You should not instantiate this class directly. 
Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryComponentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_component_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - order_by=None, # type: Optional[str] - top=None, # type: Optional[int] - skip=None, # type: Optional[str] - stage=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ComponentVersionResourceArmPaginatedResult"] + resource_group_name: str, + registry_name: str, + component_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + stage: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.ComponentVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param component_name: Container name. + :param component_name: Container name. Required. :type component_name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param stage: Component stage. + :param stage: Component stage. Default value is None. 
:type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ComponentVersionResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either ComponentVersion or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComponentVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - component_name=component_name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -318,16 +327,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ComponentVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -338,87 +346,87 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, registry_name: str, component_name: str, version: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", 
response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, registry_name: str, component_name: str, version: str, **kwargs: Any + ) -> LROPoller[None]: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param component_name: Container name. + :param component_name: Container name. Required. :type component_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -430,100 +438,111 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, version=version, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ComponentVersion" + self, resource_group_name: str, registry_name: str, component_name: str, version: str, **kwargs: Any + ) -> _models.ComponentVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. 
+ Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param component_name: Container name. + :param component_name: Container name. Required. :type component_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ComponentVersion, or the result of cls(response) + :return: ComponentVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ComponentVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ComponentVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -531,105 +550,223 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ComponentVersion', pipeline_response) + deserialized = self._deserialize("ComponentVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - version, # type: str - body, # type: "_models.ComponentVersion" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.ComponentVersion" - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentVersion"] + resource_group_name: str, + registry_name: str, + component_name: str, + version: str, + body: Union[_models.ComponentVersion, IO], + **kwargs: Any + ) -> _models.ComponentVersion: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ComponentVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComponentVersion] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ComponentVersion") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, component_name=component_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ComponentVersion', pipeline_response) + deserialized = self._deserialize("ComponentVersion", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('ComponentVersion', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = 
self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ComponentVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}" + } + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + version: str, + body: _models.ComponentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComponentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ComponentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + component_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ComponentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param component_name: Container name. Required. + :type component_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. 
+ :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ComponentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComponentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - registry_name, # type: str - component_name, # type: str - version, # type: str - body, # type: "_models.ComponentVersion" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.ComponentVersion"] + resource_group_name: str, + registry_name: str, + component_name: str, + version: str, + body: Union[_models.ComponentVersion, IO], + **kwargs: Any + ) -> LROPoller[_models.ComponentVersion]: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param component_name: Container name. + :param component_name: Container name. Required. :type component_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion + :param body: Version entity to create or update. Is either a ComponentVersion type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ComponentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -642,17 +779,17 @@ def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ComponentVersion] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ComponentVersion"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ComponentVersion] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -662,29 +799,36 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ComponentVersion', pipeline_response) + deserialized = self._deserialize("ComponentVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/components/{componentName}/versions/{version}" + } diff --git 
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_data_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_data_containers_operations.py index be6063cd267e..ccaa5223d6f3 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_data_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_data_containers_operations.py @@ -6,279 +6,286 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - - accept = "application/json" + resource_group_name: str, + registry_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, registry_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class RegistryDataContainersOperations(object): - """RegistryDataContainersOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. 
- :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryDataContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_data_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - registry_name, # type: str - skip=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.DataContainerResourceArmPaginatedResult"] + resource_group_name: str, + registry_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.DataContainer"]: """List Data containers. List Data containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataContainerResourceArmPaginatedResult or the - result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.DataContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either DataContainer or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.DataContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - api_version=api_version, - skip=skip, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -289,16 +296,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("DataContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -309,82 +315,82 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, registry_name: str, name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = 
self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete(self, resource_group_name: str, registry_name: str, name: str, **kwargs: Any) -> LROPoller[None]: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -396,95 +402,105 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, name=name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, 
lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}" + } @distributed_trace - def get( - self, - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.DataContainer" + def get(self, resource_group_name: str, registry_name: str, name: str, **kwargs: Any) -> _models.DataContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataContainer, or the result of cls(response) + :return: DataContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DataContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = 
pipeline_response.http_response if response.status_code not in [200]: @@ -492,100 +508,210 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('DataContainer', pipeline_response) + deserialized = self._deserialize("DataContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - body, # type: "_models.DataContainer" - **kwargs # type: Any - ): - # type: (...) -> "_models.DataContainer" - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataContainer"] + resource_group_name: str, + registry_name: str, + name: str, + body: Union[_models.DataContainer, IO], + **kwargs: Any + ) -> _models.DataContainer: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'DataContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataContainer] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DataContainer") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('DataContainer', pipeline_response) + deserialized = self._deserialize("DataContainer", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('DataContainer', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("DataContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + name: str, + body: _models.DataContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DataContainer]: + """Create or update container. + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either DataContainer or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.DataContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DataContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param name: Container name. Required. + :type name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either DataContainer or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.DataContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - body, # type: "_models.DataContainer" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.DataContainer"] + resource_group_name: str, + registry_name: str, + name: str, + body: Union[_models.DataContainer, IO], + **kwargs: Any + ) -> LROPoller[_models.DataContainer]: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.DataContainer + :param body: Container entity to create or update. Is either a DataContainer type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -597,17 +723,17 @@ def begin_create_or_update( :return: An instance of LROPoller that returns either DataContainer or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.DataContainer] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataContainer"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataContainer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -616,29 +742,36 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('DataContainer', pipeline_response) + deserialized = self._deserialize("DataContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}" + } diff --git 
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_data_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_data_versions_operations.py index 6eee101af5ce..3d0887a9a587 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_data_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_data_versions_operations.py @@ -6,364 +6,362 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - order_by = kwargs.pop('order_by', None) # type: Optional[str] - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[str] - tags = kwargs.pop('tags', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - - accept = "application/json" + resource_group_name: str, + registry_name: str, + name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if order_by is not None: - _query_parameters['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") if top is not None: - _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int') + _params["$top"] = _SERIALIZER.query("top", top, "int") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if tags is not None: - _query_parameters['$tags'] = _SERIALIZER.query("tags", tags, 'str') + _params["$tags"] = _SERIALIZER.query("tags", tags, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = 
_SERIALIZER.query("list_view_type", list_view_type, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, 
headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, registry_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_get_start_pending_upload_request( - subscription_id, # type: str - 
resource_group_name, # type: str - registry_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, registry_name: str, name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}/startPendingUpload") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}/startPendingUpload", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class RegistryDataVersionsOperations(object): - """RegistryDataVersionsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. 
- :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryDataVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_data_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - order_by=None, # type: Optional[str] - top=None, # type: Optional[int] - skip=None, # type: Optional[str] - tags=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.DataVersionBaseResourceArmPaginatedResult"] + resource_group_name: str, + registry_name: str, + name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + tags: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.DataVersionBase"]: """List data versions in the data container. List data versions in the data container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Data container's name. + :param name: Data container's name. Required. :type name: str - :param order_by: Please choose OrderBy value from ['createdtime', 'modifiedtime']. + :param order_by: Please choose OrderBy value from ['createdtime', 'modifiedtime']. Default + value is None. :type order_by: str :param top: Top count of results, top count cannot be greater than the page size. If topCount > page size, results with be default page size count - will be returned. + will be returned. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param list_view_type: [ListViewType.ActiveOnly, ListViewType.ArchivedOnly, - ListViewType.All]View type for including/excluding (for example) archived entities. 
+ ListViewType.All]View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either DataVersionBaseResourceArmPaginatedResult or the - result of cls(response) + :return: An iterator like instance of either DataVersionBase or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.DataVersionBaseResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.DataVersionBase] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataVersionBaseResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataVersionBaseResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, name=name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, tags=tags, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - name=name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - tags=tags, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -374,16 +372,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("DataVersionBaseResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def 
get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -394,87 +391,87 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, registry_name: str, name: str, version: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - 
response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, registry_name: str, name: str, version: str, **kwargs: Any + ) -> LROPoller[None]: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -486,100 +483,111 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
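For orientation, the regenerated `begin_delete` keeps the LROPoller-based contract described in the docstring above. The sketch below is illustrative only: it assumes the client class named in this diff (`AzureMachineLearningServices`) is exported from the vendored `v2023_08_01_preview` package and follows the usual ARM constructor of a credential plus subscription id; all resource names are placeholders.

    from azure.identity import DefaultAzureCredential
    # Assumed import path for the vendored client regenerated in this PR; the docstrings
    # refer to it under the azure.mgmt.machinelearningservices namespace.
    from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices

    client = AzureMachineLearningServices(
        credential=DefaultAzureCredential(),
        subscription_id="<subscription-id>",  # placeholder
    )

    # Long-running delete of a registry data version; result() blocks until the
    # service reports completion (the poller yields None on success).
    poller = client.registry_data_versions.begin_delete(
        resource_group_name="<resource-group>",  # placeholders
        registry_name="<registry-name>",
        name="<data-asset-name>",
        version="1",
    )
    poller.result()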
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, name=name, version=version, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.DataVersionBase" + self, resource_group_name: str, registry_name: str, name: str, version: str, **kwargs: Any + ) -> _models.DataVersionBase: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
:type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: DataVersionBase, or the result of cls(response) + :return: DataVersionBase or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.DataVersionBase - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataVersionBase"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.DataVersionBase] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -587,105 +595,223 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('DataVersionBase', pipeline_response) + deserialized = self._deserialize("DataVersionBase", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.DataVersionBase" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.DataVersionBase" - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataVersionBase"] + resource_group_name: str, + registry_name: str, + name: str, + version: str, + body: Union[_models.DataVersionBase, IO], + **kwargs: Any + ) -> _models.DataVersionBase: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'DataVersionBase') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataVersionBase] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "DataVersionBase") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('DataVersionBase', pipeline_response) + deserialized = self._deserialize("DataVersionBase", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('DataVersionBase', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", 
response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("DataVersionBase", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + name: str, + version: str, + body: _models.DataVersionBase, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DataVersionBase]: + """Create or update version. + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either DataVersionBase or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.DataVersionBase] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DataVersionBase]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param name: Container name. Required. + :type name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
+ Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either DataVersionBase or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.DataVersionBase] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - registry_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.DataVersionBase" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.DataVersionBase"] + resource_group_name: str, + registry_name: str, + name: str, + version: str, + body: Union[_models.DataVersionBase, IO], + **kwargs: Any + ) -> LROPoller[_models.DataVersionBase]: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Container name. + :param name: Container name. Required. :type name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase + :param body: Version entity to create or update. Is either a DataVersionBase type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.DataVersionBase or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -698,17 +824,17 @@ def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.DataVersionBase] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.DataVersionBase"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DataVersionBase] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -718,93 +844,194 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('DataVersionBase', pipeline_response) + deserialized = self._deserialize("DataVersionBase", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}" + } - @distributed_trace + @overload def create_or_get_start_pending_upload( self, - resource_group_name, # type: 
str - registry_name, # type: str - name, # type: str - version, # type: str - body, # type: "_models.PendingUploadRequestDto" - **kwargs # type: Any - ): - # type: (...) -> "_models.PendingUploadResponseDto" + resource_group_name: str, + registry_name: str, + name: str, + version: str, + body: _models.PendingUploadRequestDto, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponseDto: """Generate a storage location and credential for the client to upload a data asset to. Generate a storage location and credential for the client to upload a data asset to. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param name: Data asset name. This is case-sensitive. + :param name: Data asset name. This is case-sensitive. Required. :type name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Pending upload request object. + :param body: Pending upload request object. Required. :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_get_start_pending_upload( + self, + resource_group_name: str, + registry_name: str, + name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a data asset to. + + Generate a storage location and credential for the client to upload a data asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param name: Data asset name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
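The same model-or-IO pattern applies to `create_or_get_start_pending_upload`, which returns the storage location and credential described in the docstring. An illustrative sketch, assuming a `client` built as in the earlier example, the vendored models import path, and a `PendingUploadRequestDto` left at its defaults:

    # The docstrings refer to the azure.mgmt.machinelearningservices namespace; the
    # vendored copy regenerated in this PR is assumed to live under _restclient.
    from azure.ai.ml._restclient.v2023_08_01_preview.models import PendingUploadRequestDto

    # `client` is an AzureMachineLearningServices instance (see the earlier sketch).
    response = client.registry_data_versions.create_or_get_start_pending_upload(
        resource_group_name="<resource-group>",  # placeholders
        registry_name="<registry-name>",
        name="<data-asset-name>",  # case-sensitive per the docstring
        version="1",               # case-sensitive per the docstring
        body=PendingUploadRequestDto(),  # fields omitted; populate for your upload scenario
    )
    # response is a PendingUploadResponseDto carrying the upload location/credential.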
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_get_start_pending_upload( + self, + resource_group_name: str, + registry_name: str, + name: str, + version: str, + body: Union[_models.PendingUploadRequestDto, IO], + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a data asset to. + + Generate a storage location and credential for the client to upload a data asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param name: Data asset name. This is case-sensitive. Required. + :type name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Is either a PendingUploadRequestDto type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PendingUploadResponseDto, or the result of cls(response) + :return: PendingUploadResponseDto or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PendingUploadResponseDto"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PendingUploadResponseDto] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'PendingUploadRequestDto') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PendingUploadRequestDto") request = build_create_or_get_start_pending_upload_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, name=name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - 
template_url=self.create_or_get_start_pending_upload.metadata['url'], + content=_content, + template_url=self.create_or_get_start_pending_upload.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -812,12 +1039,13 @@ def create_or_get_start_pending_upload( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PendingUploadResponseDto', pipeline_response) + deserialized = self._deserialize("PendingUploadResponseDto", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_get_start_pending_upload.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}/startPendingUpload"} # type: ignore - + create_or_get_start_pending_upload.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/data/{name}/versions/{version}/startPendingUpload" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_environment_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_environment_containers_operations.py index 76e884fa9ef5..6d05502b1dd9 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_environment_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_environment_containers_operations.py @@ -6,279 +6,294 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - - accept = "application/json" + resource_group_name: str, + registry_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") # Construct headers - 
_header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, environment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "environmentName": _SERIALIZER.url("environment_name", environment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "environmentName": _SERIALIZER.url( + "environment_name", environment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - 
subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, registry_name: str, environment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "environmentName": _SERIALIZER.url("environment_name", environment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "environmentName": _SERIALIZER.url( + "environment_name", environment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, environment_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "environmentName": _SERIALIZER.url("environment_name", environment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "environmentName": _SERIALIZER.url( + "environment_name", environment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class RegistryEnvironmentContainersOperations(object): - """RegistryEnvironmentContainersOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. 
- :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryEnvironmentContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_environment_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - registry_name, # type: str - skip=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.EnvironmentContainerResourceArmPaginatedResult"] + resource_group_name: str, + registry_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.EnvironmentContainer"]: """List environment containers. List environment containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
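The regenerated `list` on the environment-containers group returns the same `ItemPaged` iterator as the data-versions listing, with next links re-issued against the client's api-version. A minimal sketch, assuming a `client` constructed as in the earlier delete example:

    # `client` is an AzureMachineLearningServices instance (see the earlier sketch).
    # Iterating the pager transparently follows next links page by page.
    for container in client.registry_environment_containers.list(
        resource_group_name="<resource-group>",  # placeholders
        registry_name="<registry-name>",
        list_view_type="ActiveOnly",  # known values per the docstring: ActiveOnly, ArchivedOnly, All
    ):
        print(container.name)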
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either EnvironmentContainerResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either EnvironmentContainer or the result of + cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - api_version=api_version, - skip=skip, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -289,16 +304,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("EnvironmentContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = 
self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -309,82 +323,84 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, registry_name: str, environment_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = 
self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, registry_name: str, environment_name: str, **kwargs: Any + ) -> LROPoller[None]: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param environment_name: Container name. + :param environment_name: Container name. Required. :type environment_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -396,95 +412,107 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 
'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.EnvironmentContainer" + self, resource_group_name: str, registry_name: str, environment_name: str, **kwargs: Any + ) -> _models.EnvironmentContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param environment_name: Container name. This is case-sensitive. + :param environment_name: Container name. This is case-sensitive. Required. 
:type environment_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EnvironmentContainer, or the result of cls(response) + :return: EnvironmentContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -492,100 +520,212 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - body, # type: "_models.EnvironmentContainer" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.EnvironmentContainer" - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentContainer"] + resource_group_name: str, + registry_name: str, + environment_name: str, + body: Union[_models.EnvironmentContainer, IO], + **kwargs: Any + ) -> _models.EnvironmentContainer: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'EnvironmentContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EnvironmentContainer] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentContainer") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + 
response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}" + } + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + body: _models.EnvironmentContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.EnvironmentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either EnvironmentContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.EnvironmentContainer]: + """Create or update container. + + Create or update container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. 
Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either EnvironmentContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - body, # type: "_models.EnvironmentContainer" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.EnvironmentContainer"] + resource_group_name: str, + registry_name: str, + environment_name: str, + body: Union[_models.EnvironmentContainer, IO], + **kwargs: Any + ) -> LROPoller[_models.EnvironmentContainer]: """Create or update container. Create or update container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param environment_name: Container name. + :param environment_name: Container name. Required. :type environment_name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer + :param body: Container entity to create or update. Is either a EnvironmentContainer type or a + IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -598,17 +738,17 @@ def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentContainer] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentContainer"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EnvironmentContainer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -617,29 +757,36 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('EnvironmentContainer', pipeline_response) + deserialized = self._deserialize("EnvironmentContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}" + } diff --git 
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_environment_versions_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_environment_versions_operations.py index 45da2675d119..dc393a392388 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_environment_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_environment_versions_operations.py @@ -6,317 +6,341 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - order_by = kwargs.pop('order_by', None) # type: Optional[str] - top = kwargs.pop('top', None) # type: Optional[int] - skip = kwargs.pop('skip', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - stage = kwargs.pop('stage', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + registry_name: str, + environment_name: str, + subscription_id: str, + *, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "environmentName": _SERIALIZER.url("environment_name", environment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "environmentName": _SERIALIZER.url( + "environment_name", environment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if order_by is not None: - _query_parameters['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") if top is not None: - _query_parameters['$top'] = _SERIALIZER.query("top", top, 'int') + _params["$top"] = _SERIALIZER.query("top", top, "int") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", 
list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") if stage is not None: - _query_parameters['stage'] = _SERIALIZER.query("stage", stage, 'str') + _params["stage"] = _SERIALIZER.query("stage", stage, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "environmentName": _SERIALIZER.url("environment_name", environment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "environmentName": _SERIALIZER.url( + "environment_name", environment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, 
"str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "environmentName": _SERIALIZER.url("environment_name", environment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "environmentName": _SERIALIZER.url( + "environment_name", environment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - 
-def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + subscription_id: str, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "environmentName": _SERIALIZER.url("environment_name", environment_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "environmentName": _SERIALIZER.url( + "environment_name", environment_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$" + ), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - 
params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class RegistryEnvironmentVersionsOperations(object): - """RegistryEnvironmentVersionsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryEnvironmentVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_environment_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - order_by=None, # type: Optional[str] - top=None, # type: Optional[int] - skip=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - stage=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.EnvironmentVersionResourceArmPaginatedResult"] + resource_group_name: str, + registry_name: str, + environment_name: str, + order_by: Optional[str] = None, + top: Optional[int] = None, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + stage: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.EnvironmentVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param environment_name: Container name. This is case-sensitive. + :param environment_name: Container name. This is case-sensitive. Required. :type environment_name: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. 
:type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :param stage: Stage for including/excluding (for example) archived entities. Takes priority - over listViewType. + over listViewType. Default value is None. :type stage: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either EnvironmentVersionResourceArmPaginatedResult or - the result of cls(response) + :return: An iterator like instance of either EnvironmentVersion or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, - api_version=api_version, + subscription_id=self._config.subscription_id, order_by=order_by, top=top, skip=skip, list_view_type=list_view_type, stage=stage, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - environment_name=environment_name, - api_version=api_version, - order_by=order_by, - top=top, - skip=skip, - list_view_type=list_view_type, - stage=stage, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) 
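The hunks above and below migrate the registry environment operations to the regenerated 2023-08-01-preview surface: typed signatures, case-insensitive header/param dicts, model-or-IO request bodies, and next-link handling that re-applies the client's api-version. For orientation only, a minimal usage sketch follows; it is not part of the generated patch. Assumed (not taken from the diff): the AzureMachineLearningServices client exported by azure.ai.ml._restclient.v2023_08_01_preview with the standard AutoRest ARM constructor (credential, subscription_id), DefaultAzureCredential from azure-identity, the EnvironmentVersionProperties.image field, and every placeholder in angle brackets.

from azure.identity import DefaultAzureCredential  # assumed dependency, used only for illustration
from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices
from azure.ai.ml._restclient.v2023_08_01_preview import models

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),  # any azure-core TokenCredential should work; assumption
    subscription_id="<subscription-id>",  # placeholder
)

# begin_create_or_update accepts an EnvironmentVersion model or a raw IO body and returns an
# LROPoller; result() waits for the ARM long-running operation wired up in the hunks above.
poller = client.registry_environment_versions.begin_create_or_update(
    resource_group_name="<resource-group>",   # placeholder
    registry_name="<registry-name>",          # placeholder
    environment_name="<environment-name>",    # placeholder
    version="1",
    body=models.EnvironmentVersion(
        # 'image' is assumed to be a valid EnvironmentVersionProperties field in this API version.
        properties=models.EnvironmentVersionProperties(image="<registry>/<image>:<tag>"),
    ),
)
created = poller.result()

# list() returns ItemPaged[EnvironmentVersion]; continuation links are followed using the
# client's api-version, matching the prepare_request logic shown above.
for env_version in client.registry_environment_versions.list(
    resource_group_name="<resource-group>",
    registry_name="<registry-name>",
    environment_name="<environment-name>",
    list_view_type="ActiveOnly",  # known values per the docstring: ActiveOnly, ArchivedOnly, All
):
    print(env_version.name)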
@@ -327,16 +351,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("EnvironmentVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -347,87 +370,87 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, registry_name: str, environment_name: str, version: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, registry_name: str, environment_name: str, version: str, **kwargs: Any + ) -> LROPoller[None]: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param environment_name: Container name. + :param environment_name: Container name. Required. :type environment_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -439,100 +462,111 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, version=version, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.EnvironmentVersion" + self, resource_group_name: str, registry_name: str, environment_name: str, version: str, **kwargs: Any + ) -> _models.EnvironmentVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. 
The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param environment_name: Container name. This is case-sensitive. + :param environment_name: Container name. This is case-sensitive. Required. :type environment_name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EnvironmentVersion, or the result of cls(response) + :return: EnvironmentVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EnvironmentVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -540,105 +574,223 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - 
version, # type: str - body, # type: "_models.EnvironmentVersion" - **kwargs # type: Any - ): - # type: (...) -> "_models.EnvironmentVersion" - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentVersion"] + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + body: Union[_models.EnvironmentVersion, IO], + **kwargs: Any + ) -> _models.EnvironmentVersion: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'EnvironmentVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EnvironmentVersion] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "EnvironmentVersion") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, environment_name=environment_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = 
self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}" + } + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + body: _models.EnvironmentVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.EnvironmentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param environment_name: Container name. Required. + :type environment_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either EnvironmentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.EnvironmentVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param environment_name: Container name. 
Required. + :type environment_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either EnvironmentVersion or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - registry_name, # type: str - environment_name, # type: str - version, # type: str - body, # type: "_models.EnvironmentVersion" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.EnvironmentVersion"] + resource_group_name: str, + registry_name: str, + environment_name: str, + version: str, + body: Union[_models.EnvironmentVersion, IO], + **kwargs: Any + ) -> LROPoller[_models.EnvironmentVersion]: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param environment_name: Container name. + :param environment_name: Container name. Required. :type environment_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion + :param body: Version entity to create or update. Is either a EnvironmentVersion type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.EnvironmentVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -651,17 +803,17 @@ def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EnvironmentVersion] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.EnvironmentVersion"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EnvironmentVersion] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -671,29 +823,36 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('EnvironmentVersion', pipeline_response) + deserialized = self._deserialize("EnvironmentVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/environments/{environmentName}/versions/{version}" + } diff --git 
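As an aside on how these regenerated registry environment-version operations are consumed, a minimal usage sketch follows; it is illustrative only and not part of the diff. The client import path, the registry_environment_versions operation-group name, and the EnvironmentVersionProperties payload are assumptions; only the begin_create_or_update / begin_delete signatures and the polling keywords are taken from the docstrings above.

# Illustrative sketch; import path, operation-group attribute name, and the
# EnvironmentVersionProperties payload below are assumptions, not confirmed by this diff.
from azure.identity import DefaultAzureCredential

from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# begin_create_or_update returns LROPoller[EnvironmentVersion]; result() blocks until
# the ARM long-running operation completes (ARMPolling, final-state-via=original-uri).
poller = client.registry_environment_versions.begin_create_or_update(
    resource_group_name="<resource-group>",
    registry_name="<registry>",
    environment_name="<environment-name>",
    version="1",
    body=_models.EnvironmentVersion(
        properties=_models.EnvironmentVersionProperties(  # hypothetical payload
            image="<docker-image-reference>",
        )
    ),
)
created = poller.result()

# begin_delete returns LROPoller[None]; polling=False skips polling entirely, and
# polling_interval overrides the default delay when no Retry-After header is present.
client.registry_environment_versions.begin_delete(
    resource_group_name="<resource-group>",
    registry_name="<registry>",
    environment_name="<environment-name>",
    version="1",
    polling_interval=10,
).result()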
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_model_containers_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_model_containers_operations.py index 099d01772ace..8110fda43cd2 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_model_containers_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_model_containers_operations.py @@ -6,279 +6,286 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - - accept = "application/json" + resource_group_name: str, + registry_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, model_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "modelName": _SERIALIZER.url("model_name", model_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, registry_name: str, model_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "modelName": _SERIALIZER.url("model_name", model_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, model_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "modelName": _SERIALIZER.url("model_name", model_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class RegistryModelContainersOperations(object): - """RegistryModelContainersOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. 
- :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryModelContainersOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_model_containers` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - registry_name, # type: str - skip=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ModelContainerResourceArmPaginatedResult"] + resource_group_name: str, + registry_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.ModelContainer"]: """List model containers. List model containers. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. 
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ModelContainerResourceArmPaginatedResult or the - result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelContainerResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either ModelContainer or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelContainerResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelContainerResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - api_version=api_version, - skip=skip, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -289,16 +296,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ModelContainerResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: 
disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -309,82 +315,84 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, registry_name: str, model_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + 
response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, registry_name: str, model_name: str, **kwargs: Any + ) -> LROPoller[None]: """Delete container. Delete container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. + :param model_name: Container name. Required. :type model_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -396,95 +404,107 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: 
+ polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ModelContainer" + self, resource_group_name: str, registry_name: str, model_name: str, **kwargs: Any + ) -> _models.ModelContainer: """Get container. Get container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. This is case-sensitive. + :param model_name: Container name. This is case-sensitive. Required. 
:type model_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ModelContainer, or the result of cls(response) + :return: ModelContainer or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ModelContainer - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelContainer"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelContainer] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -492,100 +512,212 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ModelContainer', pipeline_response) + deserialized = self._deserialize("ModelContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - body, # type: "_models.ModelContainer" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.ModelContainer" - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelContainer"] + resource_group_name: str, + registry_name: str, + model_name: str, + body: Union[_models.ModelContainer, IO], + **kwargs: Any + ) -> _models.ModelContainer: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ModelContainer') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ModelContainer] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelContainer") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ModelContainer', pipeline_response) + deserialized = self._deserialize("ModelContainer", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('ModelContainer', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + 
) + + deserialized = self._deserialize("ModelContainer", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}" + } + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + body: _models.ModelContainer, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ModelContainer]: + """Create or update model container. + + Create or update model container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param body: Container entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ModelContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ModelContainer]: + """Create or update model container. + + Create or update model container. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param body: Container entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ModelContainer or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ModelContainer] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - body, # type: "_models.ModelContainer" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.ModelContainer"] + resource_group_name: str, + registry_name: str, + model_name: str, + body: Union[_models.ModelContainer, IO], + **kwargs: Any + ) -> LROPoller[_models.ModelContainer]: """Create or update model container. Create or update model container. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. + :param model_name: Container name. Required. :type model_name: str - :param body: Container entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer + :param body: Container entity to create or update. Is either a ModelContainer type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelContainer or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -598,17 +730,17 @@ def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ModelContainer] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelContainer"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ModelContainer] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -617,29 +749,36 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ModelContainer', pipeline_response) + deserialized = self._deserialize("ModelContainer", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_model_versions_operations.py 
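Before the next file, a companion sketch for the RegistryModelContainersOperations group defined above, again illustrative rather than part of the diff. The registry_model_containers attribute name comes from the class docstring in this hunk; the client construction and the printed .name field are assumptions.

# Illustrative sketch; the client import path is an assumption, and .name relies on
# ModelContainer being a standard ARM resource model.
from azure.identity import DefaultAzureCredential

from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)

# list() returns an ItemPaged iterator; paging follows nextLink and, per the
# regenerated prepare_request above, re-applies the client's api-version.
for container in client.registry_model_containers.list(
    resource_group_name="<resource-group>",
    registry_name="<registry>",
    list_view_type="ActiveOnly",  # known values: ActiveOnly, ArchivedOnly, All
):
    print(container.name)

# get() returns a single ModelContainer.
container = client.registry_model_containers.get(
    resource_group_name="<resource-group>",
    registry_name="<registry>",
    model_name="<model-name>",
)

# begin_delete() is a long-running operation (ARMPolling, final-state-via=location).
client.registry_model_containers.begin_delete(
    resource_group_name="<resource-group>",
    registry_name="<registry>",
    model_name="<model-name>",
).result()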
b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_model_versions_operations.py index 02956c4eb336..c0a570982e27 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_model_versions_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_registry_model_versions_operations.py @@ -6,400 +6,391 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - order_by = kwargs.pop('order_by', None) # type: Optional[str] - top = kwargs.pop('top', None) # type: Optional[int] - version = kwargs.pop('version', None) # type: Optional[str] - description = kwargs.pop('description', None) # type: Optional[str] - tags = kwargs.pop('tags', None) # type: Optional[str] - properties = kwargs.pop('properties', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ListViewType"]] - - accept = "application/json" + resource_group_name: str, + registry_name: str, + model_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + order_by: Optional[str] = None, + top: Optional[int] = None, + version: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "modelName": _SERIALIZER.url("model_name", model_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if order_by is not None: - _query_parameters['$orderBy'] = _SERIALIZER.query("order_by", order_by, 'str') + _params["$orderBy"] = _SERIALIZER.query("order_by", order_by, "str") if top is not None: - 
_query_parameters['$top'] = _SERIALIZER.query("top", top, 'int') + _params["$top"] = _SERIALIZER.query("top", top, "int") if version is not None: - _query_parameters['version'] = _SERIALIZER.query("version", version, 'str') + _params["version"] = _SERIALIZER.query("version", version, "str") if description is not None: - _query_parameters['description'] = _SERIALIZER.query("description", description, 'str') + _params["description"] = _SERIALIZER.query("description", description, "str") if tags is not None: - _query_parameters['tags'] = _SERIALIZER.query("tags", tags, 'str') + _params["tags"] = _SERIALIZER.query("tags", tags, "str") if properties is not None: - _query_parameters['properties'] = _SERIALIZER.query("properties", properties, 'str') + _params["properties"] = _SERIALIZER.query("properties", properties, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, registry_name: str, model_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "modelName": _SERIALIZER.url("model_name", model_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, 
min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, registry_name: str, model_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "modelName": _SERIALIZER.url("model_name", model_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = 
_url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, registry_name: str, model_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "modelName": _SERIALIZER.url("model_name", model_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - 
_query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_package_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_package_request( + resource_group_name: str, registry_name: str, model_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "modelName": _SERIALIZER.url("model_name", model_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = 
kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_create_or_get_start_pending_upload_request( - subscription_id, # type: str - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - version, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, registry_name: str, model_name: str, version: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/startPendingUpload") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/startPendingUpload", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "registryName": _SERIALIZER.url("registry_name", registry_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$'), - "modelName": _SERIALIZER.url("model_name", model_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), - "version": _SERIALIZER.url("version", version, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "registryName": _SERIALIZER.url( + "registry_name", registry_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{2,32}$" + ), + "modelName": _SERIALIZER.url("model_name", model_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), + "version": _SERIALIZER.url("version", version, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: 
ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class RegistryModelVersionsOperations(object): - """RegistryModelVersionsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class RegistryModelVersionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`registry_model_versions` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - skip=None, # type: Optional[str] - order_by=None, # type: Optional[str] - top=None, # type: Optional[int] - version=None, # type: Optional[str] - description=None, # type: Optional[str] - tags=None, # type: Optional[str] - properties=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ListViewType"]] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ModelVersionResourceArmPaginatedResult"] + resource_group_name: str, + registry_name: str, + model_name: str, + skip: Optional[str] = None, + order_by: Optional[str] = None, + top: Optional[int] = None, + version: Optional[str] = None, + description: Optional[str] = None, + tags: Optional[str] = None, + properties: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.ModelVersion"]: """List versions. List versions. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. 
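# --- Illustrative usage sketch, under stated assumptions (not generated code) --------
# A minimal example of enumerating model versions through this operations group. Per
# the class docstring above, the group is reached via the service client's
# `registry_model_versions` attribute; `ml_services_client` and the resource names are
# placeholders, while the parameter names and the known list_view_type values come
# from the list() signature and docstring in this diff.
for model_version in ml_services_client.registry_model_versions.list(
    resource_group_name="my-resource-group",
    registry_name="my-registry",
    model_name="my-model",
    top=10,                        # optional: maximum number of records to return
    list_view_type="ActiveOnly",   # optional: "ActiveOnly", "ArchivedOnly", or "All"
):
    print(model_version.name)      # each page item is deserialized as a ModelVersion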
:type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. This is case-sensitive. + :param model_name: Container name. This is case-sensitive. Required. :type model_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param order_by: Ordering of list. + :param order_by: Ordering of list. Default value is None. :type order_by: str - :param top: Maximum number of records to return. + :param top: Maximum number of records to return. Default value is None. :type top: int - :param version: Version identifier. + :param version: Version identifier. Default value is None. :type version: str - :param description: Model description. + :param description: Model description. Default value is None. :type description: str :param tags: Comma-separated list of tag names (and optionally values). Example: - tag1,tag2=value2. + tag1,tag2=value2. Default value is None. :type tags: str :param properties: Comma-separated list of property names (and optionally values). Example: - prop1,prop2=value2. + prop1,prop2=value2. Default value is None. :type properties: str - :param list_view_type: View type for including/excluding (for example) archived entities. + :param list_view_type: View type for including/excluding (for example) archived entities. Known + values are: "ActiveOnly", "ArchivedOnly", and "All". Default value is None. :type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ModelVersionResourceArmPaginatedResult or the - result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersionResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either ModelVersion or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelVersionResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelVersionResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, order_by=order_by, top=top, @@ -408,28 +399,26 @@ def prepare_request(next_link=None): tags=tags, properties=properties, list_view_type=list_view_type, - 
template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - registry_name=registry_name, - model_name=model_name, - api_version=api_version, - skip=skip, - order_by=order_by, - top=top, - version=version, - description=description, - tags=tags, - properties=properties, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -440,16 +429,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ModelVersionResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -460,87 +448,87 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, registry_name: str, model_name: str, version: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - version, # type: str - **kwargs # type: Any - 
): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, registry_name: str, model_name: str, version: str, **kwargs: Any + ) -> LROPoller[None]: """Delete version. Delete version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. + :param model_name: Container name. Required. :type model_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -552,100 +540,111 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, version=version, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, 
polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - version, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ModelVersion" + self, resource_group_name: str, registry_name: str, model_name: str, version: str, **kwargs: Any + ) -> _models.ModelVersion: """Get version. Get version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. This is case-sensitive. + :param model_name: Container name. This is case-sensitive. Required. :type model_name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ModelVersion, or the result of cls(response) + :return: ModelVersion or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ModelVersion - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelVersion"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ModelVersion] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -653,105 +652,221 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, 
model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ModelVersion', pipeline_response) + deserialized = self._deserialize("ModelVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - version, # type: str - body, # type: "_models.ModelVersion" - **kwargs # type: Any - ): - # type: (...) -> "_models.ModelVersion" - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelVersion"] + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: Union[_models.ModelVersion, IO], + **kwargs: Any + ) -> _models.ModelVersion: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ModelVersion') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ModelVersion] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ModelVersion") + + request = build_create_or_update_request( resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = 
self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ModelVersion', pipeline_response) + deserialized = self._deserialize("ModelVersion", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('ModelVersion', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ModelVersion", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: _models.ModelVersion, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ModelVersion]: + """Create or update version. - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore + Create or update version. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either ModelVersion or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ModelVersion]: + """Create or update version. + + Create or update version. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. Required. + :type model_name: str + :param version: Version identifier. Required. + :type version: str + :param body: Version entity to create or update. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ModelVersion or the result of + cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ModelVersion] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - version, # type: str - body, # type: "_models.ModelVersion" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.ModelVersion"] + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: Union[_models.ModelVersion, IO], + **kwargs: Any + ) -> LROPoller[_models.ModelVersion]: """Create or update version. Create or update version. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. + :param model_name: Container name. Required. :type model_name: str - :param version: Version identifier. + :param version: Version identifier. Required. :type version: str - :param body: Version entity to create or update. - :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion + :param body: Version entity to create or update. Is either a ModelVersion type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.ModelVersion or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
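# --- Illustrative usage sketch, under stated assumptions (not generated code) --------
# A minimal example of the IO form of this overloaded operation: the combined method
# accepts either a ModelVersion model or a raw JSON stream, with content_type
# identifying the body. `ml_services_client`, the resource names, and the payload
# shape are assumptions for this sketch; the parameter names and the
# isinstance(body, (IOBase, bytes)) handling come from the generated code in this diff.
import io
import json

raw_body = io.BytesIO(json.dumps({"properties": {"description": "example version"}}).encode("utf-8"))
poller = ml_services_client.registry_model_versions.begin_create_or_update(
    resource_group_name="my-resource-group",
    registry_name="my-registry",
    model_name="my-model",
    version="1",
    body=raw_body,                     # IO body, handled by the second overload
    content_type="application/json",
)
model_version = poller.result()        # waits for the LRO and returns a ModelVersion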
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -763,17 +878,17 @@ def begin_create_or_update( :return: An instance of LROPoller that returns either ModelVersion or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ModelVersion] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ModelVersion"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ModelVersion] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -783,122 +898,241 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ModelVersion', pipeline_response) + deserialized = self._deserialize("ModelVersion", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}" + } def _package_initial( self, - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - version, # type: str - body, # type: "_models.PackageRequest" - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.PackageResponse"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.PackageResponse"]] + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: Union[_models.PackageRequest, IO], + **kwargs: Any + ) -> Optional[_models.PackageResponse]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PackageRequest') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.PackageResponse]] = kwargs.pop("cls", None) - request = build_package_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PackageRequest") + + request = build_package_request( resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._package_initial.metadata['url'], + content=_content, + template_url=self._package_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('PackageResponse', 
pipeline_response) + deserialized = self._deserialize("PackageResponse", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _package_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package"} # type: ignore + _package_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package" + } + + @overload + def begin_package( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: _models.PackageRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.PackageResponse]: + """Model Version Package operation. + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_package( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.PackageResponse]: + """Model Version Package operation. + + Model Version Package operation. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. 
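# Illustrative sketch, not part of the generated patch: invoking the model-version
# package LRO described here via its IO overload, which avoids assuming any
# PackageRequest fields. "client" is an already-constructed AzureMachineLearningServices
# instance and "registry_model_versions" is an assumed attribute name; the empty JSON
# payload is a placeholder only (the real schema is PackageRequest in the models package).
import json
from io import BytesIO

def package_registry_model_version(client):
    placeholder_payload = BytesIO(json.dumps({}).encode("utf-8"))
    poller = client.registry_model_versions.begin_package(
        resource_group_name="my-rg",
        registry_name="my-registry",
        model_name="my-model",
        version="1",
        body=placeholder_payload,
        content_type="application/json",
    )
    # ARMPolling follows the Location header (final-state-via: location); the
    # deserialized PackageResponse is returned once packaging completes.
    return poller.result()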
+ Required. + :type registry_name: str + :param model_name: Container name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Package operation request body. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either PackageResponse or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_package( self, - resource_group_name, # type: str - registry_name, # type: str - model_name, # type: str - version, # type: str - body, # type: "_models.PackageRequest" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.PackageResponse"] + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: Union[_models.PackageRequest, IO], + **kwargs: Any + ) -> LROPoller[_models.PackageResponse]: """Model Version Package operation. Model Version Package operation. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Container name. This is case-sensitive. + :param model_name: Container name. This is case-sensitive. Required. :type model_name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Package operation request body. - :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest + :param body: Package operation request body. Is either a PackageRequest type or a IO type. + Required. + :type body: ~azure.mgmt.machinelearningservices.models.PackageRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -911,17 +1145,17 @@ def begin_package( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.PackageResponse] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.PackageResponse"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PackageResponse] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._package_initial( resource_group_name=resource_group_name, @@ -931,93 +1165,194 @@ def begin_package( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('PackageResponse', pipeline_response) + deserialized = self._deserialize("PackageResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_package.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package"} # type: ignore + begin_package.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/package" + } - @distributed_trace + @overload def create_or_get_start_pending_upload( self, - resource_group_name, # type: str - registry_name, # 
type: str - model_name, # type: str - version, # type: str - body, # type: "_models.PendingUploadRequestDto" - **kwargs # type: Any - ): - # type: (...) -> "_models.PendingUploadResponseDto" + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: _models.PendingUploadRequestDto, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponseDto: """Generate a storage location and credential for the client to upload a model asset to. Generate a storage location and credential for the client to upload a model asset to. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. :type registry_name: str - :param model_name: Model name. This is case-sensitive. + :param model_name: Model name. This is case-sensitive. Required. :type model_name: str - :param version: Version identifier. This is case-sensitive. + :param version: Version identifier. This is case-sensitive. Required. :type version: str - :param body: Pending upload request object. + :param body: Pending upload request object. Required. :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create_or_get_start_pending_upload( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a model asset to. + + Generate a storage location and credential for the client to upload a model asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Model name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
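# Illustrative sketch, not part of the generated patch: requesting a pending-upload
# storage location for a registry model version, per the overloads documented here.
# "client" is an already-constructed AzureMachineLearningServices instance; the
# "registry_model_versions" attribute name and the bare PendingUploadRequestDto()
# payload are assumptions made only for this example.
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

def start_pending_upload(client) -> "_models.PendingUploadResponseDto":
    # Unlike the LROs above, this call returns the response DTO directly.
    return client.registry_model_versions.create_or_get_start_pending_upload(
        resource_group_name="my-rg",
        registry_name="my-registry",
        model_name="my-model",
        version="1",
        body=_models.PendingUploadRequestDto(),
    )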
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: PendingUploadResponseDto, or the result of cls(response) + :return: PendingUploadResponseDto or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def create_or_get_start_pending_upload( + self, + resource_group_name: str, + registry_name: str, + model_name: str, + version: str, + body: Union[_models.PendingUploadRequestDto, IO], + **kwargs: Any + ) -> _models.PendingUploadResponseDto: + """Generate a storage location and credential for the client to upload a model asset to. + + Generate a storage location and credential for the client to upload a model asset to. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param registry_name: Name of Azure Machine Learning registry. This is case-insensitive. + Required. + :type registry_name: str + :param model_name: Model name. This is case-sensitive. Required. + :type model_name: str + :param version: Version identifier. This is case-sensitive. Required. + :type version: str + :param body: Pending upload request object. Is either a PendingUploadRequestDto type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.PendingUploadRequestDto or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: PendingUploadResponseDto or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.PendingUploadResponseDto + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.PendingUploadResponseDto"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.PendingUploadResponseDto] = kwargs.pop("cls", None) - _json = self._serialize.body(body, 'PendingUploadRequestDto') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PendingUploadRequestDto") request = build_create_or_get_start_pending_upload_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, registry_name=registry_name, model_name=model_name, version=version, + subscription_id=self._config.subscription_id, api_version=api_version, 
content_type=content_type, json=_json, - template_url=self.create_or_get_start_pending_upload.metadata['url'], + content=_content, + template_url=self.create_or_get_start_pending_upload.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1025,12 +1360,13 @@ def create_or_get_start_pending_upload( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('PendingUploadResponseDto', pipeline_response) + deserialized = self._deserialize("PendingUploadResponseDto", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create_or_get_start_pending_upload.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/startPendingUpload"} # type: ignore - + create_or_get_start_pending_upload.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/registries/{registryName}/models/{modelName}/versions/{version}/startPendingUpload" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_schedules_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_schedules_operations.py index 816b72986e01..7ec1e3f4477f 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_schedules_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_schedules_operations.py @@ -6,279 +6,285 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. 
import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - list_view_type = kwargs.pop('list_view_type', None) # type: Optional[Union[str, "_models.ScheduleListViewType"]] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ScheduleListViewType]] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") if list_view_type is not None: - _query_parameters['listViewType'] = _SERIALIZER.query("list_view_type", list_view_type, 'str') + _params["listViewType"] = _SERIALIZER.query("list_view_type", list_view_type, "str") # Construct 
headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class SchedulesOperations(object): - """SchedulesOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. 
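# Illustrative sketch, not part of the generated patch: the module-level
# build_*_request helpers above are internal to the generated client and simply
# return azure.core.rest.HttpRequest objects; SchedulesOperations invokes them on
# your behalf. A direct call with placeholder values would look like this.
request = build_list_request(
    resource_group_name="my-rg",
    workspace_name="my-workspace",
    subscription_id="00000000-0000-0000-0000-000000000000",
    skip=None,
    list_view_type="EnabledOnly",
    api_version="2023-08-01-preview",
)
assert request.method == "GET"  # the schedules list operation is an HTTP GET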
- :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +class SchedulesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`schedules` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - skip=None, # type: Optional[str] - list_view_type=None, # type: Optional[Union[str, "_models.ScheduleListViewType"]] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ScheduleResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + skip: Optional[str] = None, + list_view_type: Optional[Union[str, _models.ScheduleListViewType]] = None, + **kwargs: Any + ) -> Iterable["_models.Schedule"]: """List schedules in specified workspace. List schedules in specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. :type skip: str - :param list_view_type: Status filter for schedule. + :param list_view_type: Status filter for schedule. Known values are: "EnabledOnly", + "DisabledOnly", and "All". Default value is None. 
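# Illustrative sketch, not part of the generated patch: enumerating schedules with
# the paged "list" operation documented here. "client" is an already-constructed
# AzureMachineLearningServices instance exposing the "schedules" attribute.
def print_enabled_schedule_names(client):
    pager = client.schedules.list(
        resource_group_name="my-rg",
        workspace_name="my-workspace",
        list_view_type="EnabledOnly",
    )
    # ItemPaged lazily follows the service's nextLink; each item is a Schedule model.
    for schedule in pager:
        print(schedule.name)  # "name" comes from the ARM resource envelope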
:type list_view_type: str or ~azure.mgmt.machinelearningservices.models.ScheduleListViewType :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ScheduleResourceArmPaginatedResult or the result - of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ScheduleResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Schedule or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Schedule] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ScheduleResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ScheduleResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, list_view_type=list_view_type, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - list_view_type=list_view_type, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -289,16 +295,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ScheduleResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, 
stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -309,82 +314,81 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", 
response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> LROPoller[None]: """Delete schedule. Delete schedule. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Schedule name. + :param name: Schedule name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -396,95 +400,104 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, 
ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}" + } @distributed_trace - def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.Schedule" + def get(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> _models.Schedule: """Get schedule. Get schedule. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Schedule name. + :param name: Schedule name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Schedule, or the result of cls(response) + :return: Schedule or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Schedule - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Schedule"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Schedule] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, 
stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -492,100 +505,121 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Schedule', pipeline_response) + deserialized = self._deserialize("Schedule", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}" + } def _create_or_update_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.Schedule" - **kwargs # type: Any - ): - # type: (...) -> "_models.Schedule" - cls = kwargs.pop('cls', None) # type: ClsType["_models.Schedule"] + self, resource_group_name: str, workspace_name: str, name: str, body: Union[_models.Schedule, IO], **kwargs: Any + ) -> _models.Schedule: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'Schedule') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Schedule] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Schedule") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise 
HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('Schedule', pipeline_response) + deserialized = self._deserialize("Schedule", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('Schedule', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", response.headers.get("Azure-AsyncOperation") + ) - if cls: - return cls(pipeline_response, deserialized, response_headers) + deserialized = self._deserialize("Schedule", pipeline_response) - return deserialized + if cls: + return cls(pipeline_response, deserialized, response_headers) # type: ignore - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + return deserialized # type: ignore + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}" + } - @distributed_trace + @overload def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.Schedule" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.Schedule"] + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.Schedule, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Schedule]: """Create or update schedule. Create or update schedule. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Schedule name. + :param name: Schedule name. Required. :type name: str - :param body: Schedule definition. + :param body: Schedule definition. Required. :type body: ~azure.mgmt.machinelearningservices.models.Schedule + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -596,17 +630,90 @@ def begin_create_or_update( Retry-After header is present. 
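
# --- Editor's usage sketch (not part of the generated patch) -----------------------------
# A minimal, hypothetical example of driving the schedules create-or-update LRO and the
# get operation documented above. The client class name and import path follow the renamed
# _azure_machine_learning_services module, but the constructor signature, the `schedules`
# attribute, and every angle-bracket value are assumptions for illustration only.
from azure.identity import DefaultAzureCredential
from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="<subscription-id>",
)
schedule_body = _models.Schedule(properties=None)  # placeholder: supply a real ScheduleProperties
poller = client.schedules.begin_create_or_update(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="<schedule-name>",
    body=schedule_body,  # per the new overloads, a bytes / file-like JSON payload is also accepted
)
created = poller.result()  # blocks until the ARM long-running operation reaches a terminal state
fetched = client.schedules.get("<resource-group>", "<workspace>", "<schedule-name>")
# ------------------------------------------------------------------------------------------
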
:return: An instance of LROPoller that returns either Schedule or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Schedule] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Schedule"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Schedule]: + """Create or update schedule. + + Create or update schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Schedule name. Required. + :type name: str + :param body: Schedule definition. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Schedule or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Schedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, resource_group_name: str, workspace_name: str, name: str, body: Union[_models.Schedule, IO], **kwargs: Any + ) -> LROPoller[_models.Schedule]: + """Create or update schedule. + + Create or update schedule. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Schedule name. Required. + :type name: str + :param body: Schedule definition. Is either a Schedule type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Schedule or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Schedule or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Schedule] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Schedule] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -615,29 +722,36 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('Schedule', pipeline_response) + deserialized = self._deserialize("Schedule", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/schedules/{name}" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_serverless_endpoints_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_serverless_endpoints_operations.py index a51f45d54c2d..ece0d557d4dd 100644 --- 
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_serverless_endpoints_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_serverless_endpoints_operations.py @@ -6,394 +6,383 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, subscription_id: str, *, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PATCH", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_list_keys_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - 
name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/listKeys") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/listKeys", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_regenerate_keys_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_regenerate_keys_request( + resource_group_name: str, workspace_name: str, name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "name": _SERIALIZER.url("name", name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "name": _SERIALIZER.url("name", name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9\-_]{0,254}$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class ServerlessEndpointsOperations(object): - """ServerlessEndpointsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. 
- :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class ServerlessEndpointsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`serverless_endpoints` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( - self, - resource_group_name, # type: str - workspace_name, # type: str - skip=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ServerlessEndpointTrackedResourceArmPaginatedResult"] + self, resource_group_name: str, workspace_name: str, skip: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.ServerlessEndpoint"]: """List Serverless Endpoints. List Serverless Endpoints. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param skip: Continuation token for pagination. + :param skip: Continuation token for pagination. Default value is None. 
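
# --- Editor's usage sketch (not part of the generated patch) -----------------------------
# list() returns an azure.core.paging.ItemPaged, so callers simply iterate and page
# continuation (the $skip token surfaced above) is handled for them. `client` is assumed to
# be an AzureMachineLearningServices instance constructed as in the earlier sketch; the
# `serverless_endpoints` attribute name comes from the class docstring in this diff.
for endpoint in client.serverless_endpoints.list(
    resource_group_name="<resource-group>", workspace_name="<workspace>"
):
    print(endpoint.name)  # each item is a ServerlessEndpoint model
# ------------------------------------------------------------------------------------------
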
:type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either - ServerlessEndpointTrackedResourceArmPaginatedResult or the result of cls(response) + :return: An iterator like instance of either ServerlessEndpoint or the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ServerlessEndpointTrackedResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ServerlessEndpointTrackedResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ServerlessEndpointTrackedResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, skip=skip, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - skip=skip, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -404,16 +393,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ServerlessEndpointTrackedResourceArmPaginatedResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = 
pipeline_response.http_response @@ -424,82 +412,81 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._delete_initial.metadata['url'], + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", 
response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}"} # type: ignore - + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } @distributed_trace - def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete(self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any) -> LROPoller[None]: """Delete Serverless Endpoint (asynchronous). Delete Serverless Endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Serverless Endpoint name. + :param name: Serverless Endpoint name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -511,95 +498,106 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = 
polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } @distributed_trace def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ServerlessEndpoint" + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.ServerlessEndpoint: """Get Serverless Endpoint. Get Serverless Endpoint. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Serverless Endpoint name. + :param name: Serverless Endpoint name. Required. 
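
# --- Editor's usage sketch (not part of the generated patch) -----------------------------
# The delete LRO above follows the standard azure-core polling contract: block on result(),
# or persist a continuation token and rebuild the poller later. `client` and all
# angle-bracket values are assumptions carried over from the earlier sketches.
poller = client.serverless_endpoints.begin_delete(
    "<resource-group>", "<workspace>", "<endpoint-name>"
)
token = poller.continuation_token()  # may be saved and used to resume polling elsewhere
poller.result()                      # returns None once the endpoint is deleted

# later, e.g. in another process, resume from the saved token:
resumed = client.serverless_endpoints.begin_delete(
    "<resource-group>", "<workspace>", "<endpoint-name>", continuation_token=token
)
resumed.wait()
# ------------------------------------------------------------------------------------------
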
:type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ServerlessEndpoint, or the result of cls(response) + :return: ServerlessEndpoint or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ServerlessEndpoint - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ServerlessEndpoint"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -607,102 +605,210 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ServerlessEndpoint', pipeline_response) + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } def _update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.PartialMinimalTrackedResourceWithSkuAndIdentity" - **kwargs # type: Any - ): - # type: (...) 
-> Optional["_models.ServerlessEndpoint"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ServerlessEndpoint"]] + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSkuAndIdentity, IO], + **kwargs: Any + ) -> Optional[_models.ServerlessEndpoint]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'PartialMinimalTrackedResourceWithSkuAndIdentity') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.ServerlessEndpoint]] = kwargs.pop("cls", None) - request = build_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "PartialMinimalTrackedResourceWithSkuAndIdentity") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._update_initial.metadata['url'], + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ServerlessEndpoint', pipeline_response) + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) if response.status_code == 202: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", 
response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.PartialMinimalTrackedResourceWithSkuAndIdentity, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ServerlessEndpoint]: + """Update Serverless Endpoint (asynchronous). + + Update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSkuAndIdentity + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ServerlessEndpoint]: + """Update Serverless Endpoint (asynchronous). + + Update Serverless Endpoint (asynchronous). + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
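
# --- Editor's usage sketch (not part of the generated patch) -----------------------------
# With the new overloads shown here, begin_update accepts either the typed partial-resource
# model or a pre-serialized JSON payload (bytes / file-like IO). `client` is an assumed
# AzureMachineLearningServices instance, and the `tags` keyword on the model is an
# illustrative assumption, not taken from this patch.
import json
from azure.ai.ml._restclient.v2023_08_01_preview import models as _models

# 1) typed partial-resource model
patch = _models.PartialMinimalTrackedResourceWithSkuAndIdentity(tags={"stage": "prod"})
client.serverless_endpoints.begin_update(
    "<resource-group>", "<workspace>", "<endpoint-name>", patch
).result()

# 2) raw JSON bytes; the content type falls back to application/json when not supplied
raw = json.dumps({"tags": {"stage": "prod"}}).encode("utf-8")
client.serverless_endpoints.begin_update(
    "<resource-group>", "<workspace>", "<endpoint-name>", raw
).result()
# ------------------------------------------------------------------------------------------
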
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.PartialMinimalTrackedResourceWithSkuAndIdentity" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.ServerlessEndpoint"] + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.PartialMinimalTrackedResourceWithSkuAndIdentity, IO], + **kwargs: Any + ) -> LROPoller[_models.ServerlessEndpoint]: """Update Serverless Endpoint (asynchronous). Update Serverless Endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Serverless Endpoint name. + :param name: Serverless Endpoint name. Required. :type name: str - :param body: Serverless Endpoint entity to apply during operation. + :param body: Serverless Endpoint entity to apply during operation. Is either a + PartialMinimalTrackedResourceWithSkuAndIdentity type or a IO type. Required. :type body: - ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSkuAndIdentity + ~azure.mgmt.machinelearningservices.models.PartialMinimalTrackedResourceWithSkuAndIdentity or + IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -715,17 +821,17 @@ def begin_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ServerlessEndpoint"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._update_initial( resource_group_name=resource_group_name, @@ -734,117 +840,230 @@ def begin_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ServerlessEndpoint', pipeline_response) + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } def _create_or_update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.ServerlessEndpoint" - **kwargs # type: Any - ): - # type: (...) 
-> "_models.ServerlessEndpoint" - cls = kwargs.pop('cls', None) # type: ClsType["_models.ServerlessEndpoint"] + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.ServerlessEndpoint, IO], + **kwargs: Any + ) -> _models.ServerlessEndpoint: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'ServerlessEndpoint') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "ServerlessEndpoint") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('ServerlessEndpoint', pipeline_response) + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) if response.status_code == 201: - response_headers['x-ms-async-operation-timeout']=self._deserialize('duration', response.headers.get('x-ms-async-operation-timeout')) - response_headers['Azure-AsyncOperation']=self._deserialize('str', response.headers.get('Azure-AsyncOperation')) - - deserialized = self._deserialize('ServerlessEndpoint', pipeline_response) + response_headers["x-ms-async-operation-timeout"] = self._deserialize( + "duration", response.headers.get("x-ms-async-operation-timeout") + ) + response_headers["Azure-AsyncOperation"] = self._deserialize( + "str", 
response.headers.get("Azure-AsyncOperation") + ) + + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) if cls: - return cls(pipeline_response, deserialized, response_headers) + return cls(pipeline_response, deserialized, response_headers) # type: ignore - return deserialized + return deserialized # type: ignore + + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.ServerlessEndpoint, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ServerlessEndpoint]: + """Create or update Serverless Endpoint (asynchronous). + + Create or update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ServerlessEndpoint + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ - _create_or_update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}"} # type: ignore + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.ServerlessEndpoint]: + """Create or update Serverless Endpoint (asynchronous). + Create or update Serverless Endpoint (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: Serverless Endpoint entity to apply during operation. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either ServerlessEndpoint or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.ServerlessEndpoint" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.ServerlessEndpoint"] + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.ServerlessEndpoint, IO], + **kwargs: Any + ) -> LROPoller[_models.ServerlessEndpoint]: """Create or update Serverless Endpoint (asynchronous). Create or update Serverless Endpoint (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Serverless Endpoint name. + :param name: Serverless Endpoint name. Required. :type name: str - :param body: Serverless Endpoint entity to apply during operation. - :type body: ~azure.mgmt.machinelearningservices.models.ServerlessEndpoint + :param body: Serverless Endpoint entity to apply during operation. Is either a + ServerlessEndpoint type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.ServerlessEndpoint or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -857,17 +1076,17 @@ def begin_create_or_update( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.ServerlessEndpoint] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.ServerlessEndpoint"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.ServerlessEndpoint] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -876,82 +1095,92 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('ServerlessEndpoint', pipeline_response) + deserialized = self._deserialize("ServerlessEndpoint", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'original-uri'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "original-uri"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}" + } @distributed_trace def list_keys( - self, - resource_group_name, # type: str - workspace_name, # 
type: str - name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.EndpointAuthKeys" + self, resource_group_name: str, workspace_name: str, name: str, **kwargs: Any + ) -> _models.EndpointAuthKeys: """List EndpointAuthKeys for an Endpoint using Key-based authentication. List EndpointAuthKeys for an Endpoint using Key-based authentication. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Serverless Endpoint name. + :param name: Serverless Endpoint name. Required. :type name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: EndpointAuthKeys, or the result of cls(response) + :return: EndpointAuthKeys or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.EndpointAuthKeys - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.EndpointAuthKeys"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.EndpointAuthKeys] = kwargs.pop("cls", None) - request = build_list_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_keys.metadata['url'], + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -959,100 +1188,204 @@ def list_keys( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('EndpointAuthKeys', pipeline_response) + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/listKeys"} # type: ignore - + list_keys.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/listKeys" + } def _regenerate_keys_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.RegenerateEndpointKeysRequest" - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.EndpointAuthKeys"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EndpointAuthKeys"]] + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.RegenerateEndpointKeysRequest, IO], + **kwargs: Any + ) -> Optional[_models.EndpointAuthKeys]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'RegenerateEndpointKeysRequest') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.EndpointAuthKeys]] = kwargs.pop("cls", None) - request = build_regenerate_keys_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "RegenerateEndpointKeysRequest") + + request = build_regenerate_keys_request( resource_group_name=resource_group_name, workspace_name=workspace_name, name=name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._regenerate_keys_initial.metadata['url'], + content=_content, + template_url=self._regenerate_keys_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('EndpointAuthKeys', pipeline_response) + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', 
response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _regenerate_keys_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys"} # type: ignore + _regenerate_keys_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys" + } + + @overload + def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: _models.RegenerateEndpointKeysRequest, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.EndpointAuthKeys]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. + :type name: str + :param body: RegenerateKeys request . Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either EndpointAuthKeys or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EndpointAuthKeys] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_regenerate_keys( + self, + resource_group_name: str, + workspace_name: str, + name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.EndpointAuthKeys]: + """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Name of Azure Machine Learning workspace. Required. + :type workspace_name: str + :param name: Serverless Endpoint name. Required. 
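A sketch combining list_keys with begin_regenerate_keys, again reusing the client from the earlier sketch. The key_type field and its "Primary" value are assumptions about the RegenerateEndpointKeysRequest model, not something this patch establishes.

# Illustrative sketch only; `client` as constructed in the begin_update sketch above.
from azure.ai.ml._restclient.v2023_08_01_preview import models  # import path assumed

keys = client.serverless_endpoints.list_keys(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="<endpoint-name>",
)

# Rotate the primary key; 'key_type' and its allowed values are assumptions about the model.
poller = client.serverless_endpoints.begin_regenerate_keys(
    resource_group_name="<resource-group>",
    workspace_name="<workspace>",
    name="<endpoint-name>",
    body=models.RegenerateEndpointKeysRequest(key_type="Primary"),
)
new_keys = poller.result()  # EndpointAuthKeys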
+ :type name: str + :param body: RegenerateKeys request . Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either EndpointAuthKeys or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EndpointAuthKeys] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_regenerate_keys( self, - resource_group_name, # type: str - workspace_name, # type: str - name, # type: str - body, # type: "_models.RegenerateEndpointKeysRequest" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.EndpointAuthKeys"] + resource_group_name: str, + workspace_name: str, + name: str, + body: Union[_models.RegenerateEndpointKeysRequest, IO], + **kwargs: Any + ) -> LROPoller[_models.EndpointAuthKeys]: """Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). Regenerate EndpointAuthKeys for an Endpoint using Key-based authentication (asynchronous). :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. :type workspace_name: str - :param name: Serverless Endpoint name. + :param name: Serverless Endpoint name. Required. :type name: str - :param body: RegenerateKeys request . - :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest + :param body: RegenerateKeys request . Is either a RegenerateEndpointKeysRequest type or a IO + type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.RegenerateEndpointKeysRequest or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -1065,17 +1398,17 @@ def begin_regenerate_keys( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.EndpointAuthKeys] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.EndpointAuthKeys"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.EndpointAuthKeys] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._regenerate_keys_initial( resource_group_name=resource_group_name, @@ -1084,29 +1417,36 @@ def begin_regenerate_keys( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('EndpointAuthKeys', pipeline_response) + deserialized = self._deserialize("EndpointAuthKeys", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_regenerate_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys"} # type: ignore + begin_regenerate_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/serverlessEndpoints/{name}/regenerateKeys" + } diff --git 
a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_usages_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_usages_operations.py index 2ffd3f588822..964eb329ba51 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_usages_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_usages_operations.py @@ -6,132 +6,135 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off -def build_list_request( - subscription_id, # type: str - location, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - accept = "application/json" +def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "location": _SERIALIZER.url("location", location, 'str', pattern=r'^[-\w\._]+$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "location": _SERIALIZER.url("location", location, "str", pattern=r"^[-\w\._]+$"), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class UsagesOperations(object): - """UsagesOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class UsagesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`usages` attribute. 
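A short sketch of the list call on this usages operations group, reusing the client from the earlier sketch. list() returns an ItemPaged that follows nextLink transparently, so a plain loop suffices; the Usage attribute names below follow the common ARM usages shape and are assumptions rather than facts of this patch.

# Illustrative sketch only; `client` as constructed in the begin_update sketch above.
for usage in client.usages.list(location="eastus"):
    # Attribute names assumed from the common ARM Usage shape.
    name = usage.name.value if usage.name else "<unknown>"
    print(name, usage.current_value, usage.limit)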
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list( - self, - location, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ListUsagesResult"] + def list(self, location: str, **kwargs: Any) -> Iterable["_models.Usage"]: """Gets the current usage information as well as limits for AML resources for given subscription and location. - :param location: The location for which resource usage is queried. + :param location: The location for which resource usage is queried. Required. :type location: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ListUsagesResult or the result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ListUsagesResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Usage or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Usage] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ListUsagesResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListUsagesResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, location=location, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - location=location, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = 
self._client.format_url(request.url) @@ -142,16 +145,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ListUsagesResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -162,8 +164,8 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/usages" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_virtual_machine_sizes_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_virtual_machine_sizes_operations.py index b83a3abb83bc..6f1c0e54f404 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_virtual_machine_sizes_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_virtual_machine_sizes_operations.py @@ -6,126 +6,122 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +from typing import Any, Callable, Dict, Optional, TypeVar + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Optional, TypeVar - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off -def build_list_request( - location, # type: str - subscription_id, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - accept = "application/json" +def build_list_request(location: str, subscription_id: str, **kwargs: Any) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes", + ) # pylint: disable=line-too-long path_format_arguments = { - "location": _SERIALIZER.url("location", location, 'str', pattern=r'^[-\w\._]+$'), - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "location": _SERIALIZER.url("location", location, "str", pattern=r"^[-\w\._]+$"), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class VirtualMachineSizesOperations(object): - """VirtualMachineSizesOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class VirtualMachineSizesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`virtual_machine_sizes` attribute. 
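A matching sketch for the virtual machine sizes operations group, reusing the client from the earlier sketch. Unlike the usages call, this one is not paged: it returns a single VirtualMachineSizeListResult, and the .value and .name attributes are assumptions from the usual VirtualMachineSize model shape.

# Illustrative sketch only; `client` as constructed in the begin_update sketch above.
sizes = client.virtual_machine_sizes.list(location="eastus")
for size in sizes.value or []:  # '.value' assumed to hold the VirtualMachineSize entries
    print(size.name)            # '.name' assumed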
""" models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list( - self, - location, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.VirtualMachineSizeListResult" + def list(self, location: str, **kwargs: Any) -> _models.VirtualMachineSizeListResult: """Returns supported VM Sizes in a location. - :param location: The location upon which virtual-machine-sizes is queried. + :param location: The location upon which virtual-machine-sizes is queried. Required. :type location: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: VirtualMachineSizeListResult, or the result of cls(response) + :return: VirtualMachineSizeListResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.VirtualMachineSizeListResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineSizeListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.VirtualMachineSizeListResult] = kwargs.pop("cls", None) - request = build_list_request( location=location, subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -133,12 +129,13 @@ def list( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('VirtualMachineSizeListResult', pipeline_response) + deserialized = self._deserialize("VirtualMachineSizeListResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes"} # type: 
ignore - + list.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/locations/{location}/vmSizes" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_workspace_connections_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_workspace_connections_operations.py index a23a56985311..7eeb6c99fc44 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_workspace_connections_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_workspace_connections_operations.py @@ -6,358 +6,428 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse +from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat +from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - target = kwargs.pop('target', None) # type: Optional[str] - category = kwargs.pop('category', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + subscription_id: str, + *, + target: Optional[str] = None, + category: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] if target is not None: - _query_parameters['target'] = _SERIALIZER.query("target", target, 'str') + _params["target"] = _SERIALIZER.query("target", target, "str") if category is not None: - _query_parameters['category'] = _SERIALIZER.query("category", category, 'str') - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["category"] = _SERIALIZER.query("category", category, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_delete_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, connection_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "connectionName": _SERIALIZER.url( + "connection_name", connection_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + connection_name: str, + subscription_id: str, + *, + aoai_models_to_deploy: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "connectionName": _SERIALIZER.url( + "connection_name", connection_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + if aoai_models_to_deploy is not None: + _params["aoaiModelsToDeploy"] = _SERIALIZER.query("aoai_models_to_deploy", aoai_models_to_deploy, "str") + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_update_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, connection_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "connectionName": _SERIALIZER.url( + "connection_name", connection_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PATCH", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) def build_create_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, workspace_name: str, connection_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "connectionName": _SERIALIZER.url( + "connection_name", connection_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) def build_list_secrets_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, + workspace_name: str, + connection_name: str, + subscription_id: str, + *, + aoai_models_to_deploy: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/listsecrets") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/listsecrets", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), - "connectionName": _SERIALIZER.url("connection_name", connection_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "connectionName": _SERIALIZER.url( + "connection_name", connection_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + if aoai_models_to_deploy is not None: + _params["aoaiModelsToDeploy"] = _SERIALIZER.query("aoai_models_to_deploy", aoai_models_to_deploy, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class WorkspaceConnectionsOperations(object): - """WorkspaceConnectionsOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. 
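An illustrative sketch (not part of the generated patch): the rewritten request builders above are plain functions that assemble an azure.core.rest.HttpRequest without touching the network, so they can be exercised offline. The import path mirrors the file being diffed, the signature is the new one shown above, and all argument values are placeholders.

# Illustrative sketch only -- exercises the regenerated builder offline; values are placeholders.
from azure.ai.ml._restclient.v2023_08_01_preview.operations._workspace_connections_operations import (
    build_list_request,
)

request = build_list_request(
    resource_group_name="my-rg",
    workspace_name="my-ws",
    subscription_id="00000000-0000-0000-0000-000000000000",
    category="AzureOpenAI",          # optional keyword-only query parameter
)
print(request.method)                # "GET"
print(request.url)                   # ARM path with the {placeholders} formatted in
print(request.headers["Accept"])     # "application/json"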
+ _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_test_connection_request( + resource_group_name: str, workspace_name: str, connection_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + + # Construct URL + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/testconnection", + ) # pylint: disable=line-too-long + path_format_arguments = { + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + "connectionName": _SERIALIZER.url( + "connection_name", connection_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), + } + + _url: str = _url.format(**path_format_arguments) # type: ignore + + # Construct parameters + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") + + # Construct headers + if content_type is not None: + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class WorkspaceConnectionsOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`workspace_connections` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list( self, - resource_group_name, # type: str - workspace_name, # type: str - target=None, # type: Optional[str] - category=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult"] + resource_group_name: str, + workspace_name: str, + target: Optional[str] = None, + category: Optional[str] = None, + **kwargs: Any + ) -> Iterable["_models.WorkspaceConnectionPropertiesV2BasicResource"]: """Lists all the available machine learning workspaces connections under the specified workspace. 
Lists all the available machine learning workspaces connections under the specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param target: Target of the workspace connection. + :param target: Target of the workspace connection. Default value is None. :type target: str - :param category: Category of the workspace connection. + :param category: Category of the workspace connection. Default value is None. :type category: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either - WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult or the result of cls(response) + :return: An iterator like instance of either WorkspaceConnectionPropertiesV2BasicResource or + the result of cls(response) :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult] - :raises: ~azure.core.exceptions.HttpResponseError + ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, target=target, category=category, - template_url=self.list.metadata['url'], + api_version=api_version, + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - target=target, - category=category, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), 
params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -365,19 +435,20 @@ def prepare_request(next_link=None): return request def extract_data(pipeline_response): - deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", pipeline_response) + deserialized = self._deserialize( + "WorkspaceConnectionPropertiesV2BasicResourceArmPaginatedResult", pipeline_response + ) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -388,61 +459,64 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections" + } @distributed_trace def delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> None + self, resource_group_name: str, workspace_name: str, connection_name: str, **kwargs: Any + ) -> None: """Delete machine learning workspaces connections by name. Delete machine learning workspaces connections by name. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. 
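A hedged usage sketch (not part of the patch) for the list operation defined above, which returns an ItemPaged iterator whose nextLink handling is done by prepare_request. The constructor shape assumed here (credential plus subscription_id) follows the usual AutoRest management-client layout rather than anything shown in this diff, and DefaultAzureCredential requires the azure-identity package.

# Illustrative sketch only. Assumes the regenerated package exports the renamed
# AzureMachineLearningServices client with the standard (credential, subscription_id) shape.
from azure.identity import DefaultAzureCredential
from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(),
    subscription_id="00000000-0000-0000-0000-000000000000",
)

# The operations group is reached through the client's workspace_connections attribute,
# as the class docstring above states; iterating drives the nextLink requests transparently.
for connection in client.workspace_connections.list("my-rg", "my-ws", category="AzureOpenAI"):
    print(connection.name)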
:type connection_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: None, or the result of cls(response) + :return: None or the result of cls(response) :rtype: None - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) - request = build_delete_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.delete.metadata['url'], + template_url=self.delete.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 204]: @@ -453,58 +527,71 @@ def delete( # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore - + delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" + } @distributed_trace def get( self, - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.WorkspaceConnectionPropertiesV2BasicResource" + resource_group_name: str, + workspace_name: str, + connection_name: str, + aoai_models_to_deploy: Optional[str] = None, + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: """Lists machine learning workspaces connections by name. Lists machine learning workspaces connections by name. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str + :param aoai_models_to_deploy: query parameter for which AOAI mode should be deployed. 
Default + value is None. + :type aoai_models_to_deploy: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnectionPropertiesV2BasicResource, or the result of cls(response) + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnectionPropertiesV2BasicResource"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, + subscription_id=self._config.subscription_id, + aoai_models_to_deploy=aoai_models_to_deploy, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -512,75 +599,161 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnectionPropertiesV2BasicResource', pipeline_response) + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" + } + @overload + def update( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[_models.WorkspaceConnectionUpdateParameter] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """Update machine learning workspaces connections under the specified workspace. 
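A short sketch (not part of the patch) for the get and list_secrets operations shown above, which gained an optional aoai_models_to_deploy parameter in this API version, forwarded as the aoaiModelsToDeploy query string. It reuses the `client` assumed in the earlier sketch; the parameter value is a placeholder.

# Illustrative sketch only; `client` is the service client from the previous sketch.
connection = client.workspace_connections.get(
    resource_group_name="my-rg",
    workspace_name="my-ws",
    connection_name="my-aoai-connection",
    aoai_models_to_deploy="gpt-35-turbo",  # placeholder; forwarded as aoaiModelsToDeploy
)
print(connection.name)

# list_secrets takes the same parameters and returns the connection resource with credentials.
secrets = client.workspace_connections.list_secrets("my-rg", "my-ws", "my-aoai-connection")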
+ + Update machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: Parameters for workspace connection update. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUpdateParameter + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def update( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """Update machine learning workspaces connections under the specified workspace. + + Update machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: Parameters for workspace connection update. Default value is None. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def update( self, - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - body=None, # type: Optional["_models.WorkspaceConnectionUpdateParameter"] - **kwargs # type: Any - ): - # type: (...) -> "_models.WorkspaceConnectionPropertiesV2BasicResource" + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[Union[_models.WorkspaceConnectionUpdateParameter, IO]] = None, + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: """Update machine learning workspaces connections under the specified workspace. Update machine learning workspaces connections under the specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. 
+ :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str - :param body: Parameters for workspace connection update. - :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUpdateParameter + :param body: Parameters for workspace connection update. Is either a + WorkspaceConnectionUpdateParameter type or a IO type. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionUpdateParameter or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnectionPropertiesV2BasicResource, or the result of cls(response) + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnectionPropertiesV2BasicResource"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] = kwargs.pop("cls", None) - if body is not None: - _json = self._serialize.body(body, 'WorkspaceConnectionUpdateParameter') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = None + if body is not None: + _json = self._serialize.body(body, "WorkspaceConnectionUpdateParameter") + else: + _json = None request = build_update_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.update.metadata['url'], + content=_content, + template_url=self.update.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -588,76 +761,165 @@ def update( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise 
HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnectionPropertiesV2BasicResource', pipeline_response) + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore + update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" + } + + @overload + def create( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[_models.WorkspaceConnectionPropertiesV2BasicResource] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """Create or update machine learning workspaces connections under the specified workspace. + + Create or update machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: The object for creating or updating a new workspace connection. Default value is + None. + :type body: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def create( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: + """Create or update machine learning workspaces connections under the specified workspace. + Create or update machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: The object for creating or updating a new workspace connection. Default value is + None. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". 
+ :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) + :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def create( self, - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - body=None, # type: Optional["_models.WorkspaceConnectionPropertiesV2BasicResource"] - **kwargs # type: Any - ): - # type: (...) -> "_models.WorkspaceConnectionPropertiesV2BasicResource" + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[Union[_models.WorkspaceConnectionPropertiesV2BasicResource, IO]] = None, + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: """Create or update machine learning workspaces connections under the specified workspace. Create or update machine learning workspaces connections under the specified workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str - :param body: The object for creating or updating a new workspace connection. + :param body: The object for creating or updating a new workspace connection. Is either a + WorkspaceConnectionPropertiesV2BasicResource type or a IO type. Default value is None. :type body: - ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. 
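A hedged sketch (not part of the patch) of the two body forms that the update and create operations now accept, matching the isinstance(body, (IOBase, bytes)) dispatch in the implementation: a typed model that the operation serializes, or a pre-encoded payload sent as-is with an explicit content type. It reuses the `client` assumed earlier; the raw JSON shape is a placeholder, not a documented contract.

# Illustrative sketch only; `client` as in the earlier sketches.
import json

from azure.ai.ml._restclient.v2023_08_01_preview import models

# 1) Typed-model form: the operation serializes WorkspaceConnectionUpdateParameter itself.
update_body = models.WorkspaceConnectionUpdateParameter()  # populate .properties as needed
client.workspace_connections.update("my-rg", "my-ws", "my-connection", body=update_body)

# 2) Raw-payload form: IOBase/bytes bodies are passed through unchanged.
raw = json.dumps({"properties": {}}).encode("utf-8")  # placeholder payload
client.workspace_connections.create(
    "my-rg", "my-ws", "my-connection", body=raw, content_type="application/json"
)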
+ :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnectionPropertiesV2BasicResource, or the result of cls(response) + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnectionPropertiesV2BasicResource"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - if body is not None: - _json = self._serialize.body(body, 'WorkspaceConnectionPropertiesV2BasicResource') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = None + if body is not None: + _json = self._serialize.body(body, "WorkspaceConnectionPropertiesV2BasicResource") + else: + _json = None request = build_create_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self.create.metadata['url'], + content=_content, + template_url=self.create.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -665,65 +927,78 @@ def create( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnectionPropertiesV2BasicResource', pipeline_response) + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - create.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}"} # type: ignore - + create.metadata = { + 
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}" + } @distributed_trace def list_secrets( self, - resource_group_name, # type: str - workspace_name, # type: str - connection_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.WorkspaceConnectionPropertiesV2BasicResource" + resource_group_name: str, + workspace_name: str, + connection_name: str, + aoai_models_to_deploy: Optional[str] = None, + **kwargs: Any + ) -> _models.WorkspaceConnectionPropertiesV2BasicResource: """List all the secrets of a machine learning workspaces connections. List all the secrets of a machine learning workspaces connections. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param connection_name: Friendly name of the workspace connection. + :param connection_name: Friendly name of the workspace connection. Required. :type connection_name: str + :param aoai_models_to_deploy: query parameter for which AOAI mode should be deployed. Default + value is None. + :type aoai_models_to_deploy: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: WorkspaceConnectionPropertiesV2BasicResource, or the result of cls(response) + :return: WorkspaceConnectionPropertiesV2BasicResource or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceConnectionPropertiesV2BasicResource"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceConnectionPropertiesV2BasicResource] = kwargs.pop("cls", None) - request = build_list_secrets_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, connection_name=connection_name, + subscription_id=self._config.subscription_id, + aoai_models_to_deploy=aoai_models_to_deploy, api_version=api_version, - template_url=self.list_secrets.metadata['url'], + template_url=self.list_secrets.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if 
response.status_code not in [200]: @@ -731,12 +1006,253 @@ def list_secrets( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('WorkspaceConnectionPropertiesV2BasicResource', pipeline_response) + deserialized = self._deserialize("WorkspaceConnectionPropertiesV2BasicResource", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_secrets.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/listsecrets"} # type: ignore + list_secrets.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/listsecrets" + } + + def _test_connection_initial( # pylint: disable=inconsistent-return-statements + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[Union[_models.WorkspaceConnectionPropertiesV2BasicResource, IO]] = None, + **kwargs: Any + ) -> None: + error_map = { + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, + } + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + if body is not None: + _json = self._serialize.body(body, "WorkspaceConnectionPropertiesV2BasicResource") + else: + _json = None + + request = build_test_connection_request( + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + subscription_id=self._config.subscription_id, + api_version=api_version, + content_type=content_type, + json=_json, + content=_content, + template_url=self._test_connection_initial.metadata["url"], + headers=_headers, + params=_params, + ) + request = _convert_request(request) + request.url = self._client.format_url(request.url) + + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs + ) + + response = pipeline_response.http_response + + if response.status_code not in [202]: + map_error(status_code=response.status_code, response=response, error_map=error_map) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) + + response_headers = {} + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) + + if cls: + return cls(pipeline_response, None, response_headers) + _test_connection_initial.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/testconnection" + } + + @overload + def begin_test_connection( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[_models.WorkspaceConnectionPropertiesV2BasicResource] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Test machine learning workspaces connections under the specified workspace. + + Test machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: Workspace Connection object. Default value is None. + :type body: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_test_connection( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[None]: + """Test machine learning workspaces connections under the specified workspace. + + Test machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: Workspace Connection object. Default value is None. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_test_connection( + self, + resource_group_name: str, + workspace_name: str, + connection_name: str, + body: Optional[Union[_models.WorkspaceConnectionPropertiesV2BasicResource, IO]] = None, + **kwargs: Any + ) -> LROPoller[None]: + """Test machine learning workspaces connections under the specified workspace. + + Test machine learning workspaces connections under the specified workspace. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param connection_name: Friendly name of the workspace connection. Required. + :type connection_name: str + :param body: Workspace Connection object. Is either a + WorkspaceConnectionPropertiesV2BasicResource type or a IO type. Default value is None. + :type body: + ~azure.mgmt.machinelearningservices.models.WorkspaceConnectionPropertiesV2BasicResource or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either None or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[None] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) + if cont_token is None: + raw_result = self._test_connection_initial( # type: ignore + resource_group_name=resource_group_name, + workspace_name=workspace_name, + connection_name=connection_name, + body=body, + api_version=api_version, + content_type=content_type, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, + **kwargs + ) + kwargs.pop("error_map", None) + + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements + if cls: + return cls(pipeline_response, None, {}) + + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling + if cont_token: + return LROPoller.from_continuation_token( + polling_method=polling_method, + continuation_token=cont_token, + client=self._client, + deserialization_callback=get_long_running_output, + ) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore + + begin_test_connection.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/testconnection" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_workspace_features_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_workspace_features_operations.py index 0c93dfbb6244..fb0edd7c7fe2 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_workspace_features_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_workspace_features_operations.py @@ -6,139 +6,145 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
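# Editor's note: the regenerated pager in this file resolves next_link itself and re-applies the
# client's api-version on continuation pages, returning an ItemPaged of AmlUserFeature. A small
# consumption sketch under those assumptions (resource names are placeholders, not part of the
# generated file); it relies on the `workspace_features` attribute documented in the class below.
from azure.identity import DefaultAzureCredential
from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices

client = AzureMachineLearningServices(
    credential=DefaultAzureCredential(), subscription_id="<subscription-id>"
)
# ItemPaged walks every page lazily; each item is an AmlUserFeature model instance.
for feature in client.workspace_features.list(
    resource_group_name="<resource-group>", workspace_name="<workspace-name>"
):
    print(feature.id, feature.display_name)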
# -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from typing import Any, Callable, Dict, Iterable, Optional, TypeVar + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) -> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - 
_query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class WorkspaceFeaturesOperations(object): - """WorkspaceFeaturesOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +class WorkspaceFeaturesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. + + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`workspace_features` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace - def list( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.ListAmlUserFeatureResult"] + def list(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> Iterable["_models.AmlUserFeature"]: """Lists all enabled features for a workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Name of Azure Machine Learning workspace. Required. 
:type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either ListAmlUserFeatureResult or the result of - cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.ListAmlUserFeatureResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either AmlUserFeature or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.AmlUserFeature] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ListAmlUserFeatureResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListAmlUserFeatureResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list.metadata['url'], + template_url=self.list.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - workspace_name=workspace_name, - api_version=api_version, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -149,16 +155,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("ListAmlUserFeatureResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -169,8 +174,8 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, 
extract_data - ) - list.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features"} # type: ignore + list.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/features" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_workspaces_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_workspaces_operations.py index 925ea3903b9c..ad93a4f62b6a 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_workspaces_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/_restclient/v2023_08_01_preview/operations/_workspaces_operations.py @@ -6,641 +6,620 @@ # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- -from typing import TYPE_CHECKING - -from msrest import Serializer - -from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error +import urllib.parse +from io import IOBase +from typing import IO, Any, Callable, Dict, Iterable, Optional, TypeVar, Union, cast, overload + +from azure.core.exceptions import ( + ClientAuthenticationError, + HttpResponseError, + ResourceExistsError, + ResourceNotFoundError, + ResourceNotModifiedError, + map_error, +) from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace +from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models -from .._vendor import _convert_request, _format_url_section +from .._serialization import Serializer +from .._vendor import _convert_request -if TYPE_CHECKING: - # pylint: disable=unused-import,ungrouped-imports - from typing import Any, Callable, Dict, Iterable, Optional, TypeVar, Union - T = TypeVar('T') - ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] +T = TypeVar("T") +ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() _SERIALIZER.client_side_validation = False -# fmt: off + def build_list_by_subscription_request( - subscription_id, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - kind = kwargs.pop('kind', None) # type: Optional[str] - - accept = "application/json" + subscription_id: str, *, kind: Optional[str] = None, skip: Optional[str] = None, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces" + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if kind is not None: - _query_parameters['kind'] = _SERIALIZER.query("kind", kind, 'str') + _params["kind"] = _SERIALIZER.query("kind", kind, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) def build_list_by_resource_group_request( - subscription_id, # type: str - resource_group_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - skip = kwargs.pop('skip', None) # type: Optional[str] - kind = kwargs.pop('kind', None) # type: Optional[str] - - accept = "application/json" + resource_group_name: str, + subscription_id: str, + *, + kind: Optional[str] = None, + skip: Optional[str] = None, + **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') - if skip is not None: - _query_parameters['$skip'] = _SERIALIZER.query("skip", skip, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if kind is not None: - _query_parameters['kind'] = _SERIALIZER.query("kind", kind, 'str') + _params["kind"] = _SERIALIZER.query("kind", kind, "str") + if skip is not None: + _params["$skip"] = _SERIALIZER.query("skip", skip, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_delete_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - force_to_purge = kwargs.pop('force_to_purge', False) # type: Optional[bool] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_delete_request( + resource_group_name: str, workspace_name: str, subscription_id: str, *, force_to_purge: bool = False, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") if force_to_purge is not None: - _query_parameters['forceToPurge'] = _SERIALIZER.query("force_to_purge", force_to_purge, 'bool') + _params["forceToPurge"] = _SERIALIZER.query("force_to_purge", force_to_purge, "bool") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="DELETE", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs) def build_get_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_update_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PATCH", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_create_or_update_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_create_or_update_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="PUT", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_diagnose_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', None) # type: Optional[str] - - accept = "application/json" + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_diagnose_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] if content_type is not None: - _header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str") + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_list_keys_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_list_notebook_access_token_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_list_notebook_keys_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_list_storage_account_keys_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) def build_list_outbound_network_dependencies_endpoints_request( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="GET", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_prepare_notebook_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_prepare_notebook_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - - -def build_resync_keys_request_initial( - subscription_id, # type: str - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any -): - # type: (...) 
-> HttpRequest - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - - accept = "application/json" + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +def build_resync_keys_request( + resource_group_name: str, workspace_name: str, subscription_id: str, **kwargs: Any +) -> HttpRequest: + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-08-01-preview")) + accept = _headers.pop("Accept", "application/json") + # Construct URL - _url = kwargs.pop("template_url", "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys") # pylint: disable=line-too-long + _url = kwargs.pop( + "template_url", + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys", + ) # pylint: disable=line-too-long path_format_arguments = { - "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str', min_length=1), - "resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1), - "workspaceName": _SERIALIZER.url("workspace_name", workspace_name, 'str', pattern=r'^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$'), + "subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1), + "resourceGroupName": _SERIALIZER.url( + "resource_group_name", resource_group_name, "str", max_length=90, min_length=1 + ), + "workspaceName": _SERIALIZER.url( + "workspace_name", workspace_name, "str", pattern=r"^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" + ), } - _url = _format_url_section(_url, **path_format_arguments) + _url: str = _url.format(**path_format_arguments) # type: ignore # Construct parameters - _query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any] - _query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str') + _params["api-version"] = _SERIALIZER.query("api_version", api_version, "str") # Construct headers - _header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any] - _header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') - - return HttpRequest( - method="POST", - url=_url, - params=_query_parameters, - headers=_header_parameters, - **kwargs - ) - -# fmt: on -class WorkspacesOperations(object): - """WorkspacesOperations operations. - - You should not instantiate this class directly. Instead, you should create a Client instance that - instantiates it for you and attaches it as an attribute. - - :ivar models: Alias to model classes used in this operation group. - :type models: ~azure.mgmt.machinelearningservices.models - :param client: Client for service requests. - :param config: Configuration of service client. - :param serializer: An object model serializer. - :param deserializer: An object model deserializer. + _headers["Accept"] = _SERIALIZER.header("accept", accept, "str") + + return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs) + + +class WorkspacesOperations: + """ + .. warning:: + **DO NOT** instantiate this class directly. 
+ + Instead, you should access the following operations through + :class:`~azure.mgmt.machinelearningservices.AzureMachineLearningServices`'s + :attr:`workspaces` attribute. """ models = _models - def __init__(self, client, config, serializer, deserializer): - self._client = client - self._serialize = serializer - self._deserialize = deserializer - self._config = config + def __init__(self, *args, **kwargs): + input_args = list(args) + self._client = input_args.pop(0) if input_args else kwargs.pop("client") + self._config = input_args.pop(0) if input_args else kwargs.pop("config") + self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") + self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list_by_subscription( - self, - skip=None, # type: Optional[str] - kind=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.WorkspaceListResult"] + self, kind: Optional[str] = None, skip: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.Workspace"]: """Lists all the available machine learning workspaces under the specified subscription. Lists all the available machine learning workspaces under the specified subscription. - :param skip: Continuation token for pagination. - :type skip: str - :param kind: Kind of workspace. + :param kind: Kind of workspace. Default value is None. :type kind: str + :param skip: Continuation token for pagination. Default value is None. + :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Workspace or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_by_subscription_request( subscription_id=self._config.subscription_id, - api_version=api_version, - skip=skip, kind=kind, - template_url=self.list_by_subscription.metadata['url'], + skip=skip, + api_version=api_version, + template_url=self.list_by_subscription.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_by_subscription_request( - subscription_id=self._config.subscription_id, - api_version=api_version, - skip=skip, - kind=kind, - template_url=next_link, + # make 
call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -651,16 +630,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -671,67 +649,74 @@ def get_next(next_link=None): return pipeline_response + return ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list_by_subscription.metadata = {'url': "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces"} # type: ignore + list_by_subscription.metadata = { + "url": "/subscriptions/{subscriptionId}/providers/Microsoft.MachineLearningServices/workspaces" + } @distributed_trace def list_by_resource_group( - self, - resource_group_name, # type: str - skip=None, # type: Optional[str] - kind=None, # type: Optional[str] - **kwargs # type: Any - ): - # type: (...) -> Iterable["_models.WorkspaceListResult"] + self, resource_group_name: str, kind: Optional[str] = None, skip: Optional[str] = None, **kwargs: Any + ) -> Iterable["_models.Workspace"]: """Lists all the available machine learning workspaces under the specified resource group. Lists all the available machine learning workspaces under the specified resource group. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param skip: Continuation token for pagination. - :type skip: str - :param kind: Kind of workspace. + :param kind: Kind of workspace. Default value is None. :type kind: str + :param skip: Continuation token for pagination. Default value is None. 
+ :type skip: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: An iterator like instance of either WorkspaceListResult or the result of cls(response) - :rtype: - ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.WorkspaceListResult] - :raises: ~azure.core.exceptions.HttpResponseError + :return: An iterator like instance of either Workspace or the result of cls(response) + :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.WorkspaceListResult] = kwargs.pop("cls", None) - cls = kwargs.pop('cls', None) # type: ClsType["_models.WorkspaceListResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + def prepare_request(next_link=None): if not next_link: - + request = build_list_by_resource_group_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, - api_version=api_version, - skip=skip, + subscription_id=self._config.subscription_id, kind=kind, - template_url=self.list_by_resource_group.metadata['url'], + skip=skip, + api_version=api_version, + template_url=self.list_by_resource_group.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) else: - - request = build_list_by_resource_group_request( - subscription_id=self._config.subscription_id, - resource_group_name=resource_group_name, - api_version=api_version, - skip=skip, - kind=kind, - template_url=next_link, + # make call to next link with the client's api-version + _parsed_next_link = urllib.parse.urlparse(next_link) + _next_request_params = case_insensitive_dict( + { + key: [urllib.parse.quote(v) for v in value] + for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() + } + ) + _next_request_params["api-version"] = self._config.api_version + request = HttpRequest( + "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) @@ -742,16 +727,15 @@ def extract_data(pipeline_response): deserialized = self._deserialize("WorkspaceListResult", pipeline_response) list_of_elem = deserialized.value if cls: - list_of_elem = cls(list_of_elem) + list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) response = pipeline_response.http_response @@ -762,75 +746,80 @@ def get_next(next_link=None): return pipeline_response + return 
ItemPaged(get_next, extract_data) - return ItemPaged( - get_next, extract_data - ) - list_by_resource_group.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces"} # type: ignore + list_by_resource_group.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces" + } def _delete_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - force_to_purge=False, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, force_to_purge: bool = False, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_delete_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_delete_request( resource_group_name=resource_group_name, workspace_name=workspace_name, - api_version=api_version, + subscription_id=self._config.subscription_id, force_to_purge=force_to_purge, - template_url=self._delete_initial.metadata['url'], + api_version=api_version, + template_url=self._delete_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - if cls: - return cls(pipeline_response, None, {}) + response_headers = {} + if response.status_code == 202: + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) - _delete_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + if cls: + return cls(pipeline_response, None, response_headers) + _delete_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } @distributed_trace 
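# Illustrative sketch of the two listing operations above, not taken from this patch:
# it assumes `client` is an already-constructed AzureMachineLearningServices instance
# (construction and credentials are outside the scope of this diff). Both calls return
# an ItemPaged iterator of Workspace objects; `kind` and `skip` remain optional.
all_workspaces = list(client.workspaces.list_by_subscription())
rg_workspaces = list(client.workspaces.list_by_resource_group("my-resource-group"))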
- def begin_delete( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - force_to_purge=False, # type: Optional[bool] - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_delete( + self, resource_group_name: str, workspace_name: str, force_to_purge: bool = False, **kwargs: Any + ) -> LROPoller[None]: """Deletes a machine learning workspace. Deletes a machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param force_to_purge: Flag to indicate delete is a purge request. + :param force_to_purge: Flag to indicate delete is a purge request. Default value is False. :type force_to_purge: bool :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -842,91 +831,101 @@ def begin_delete( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._delete_initial( + raw_result = self._delete_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, force_to_purge=force_to_purge, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - 
deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_delete.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + begin_delete.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } @distributed_trace - def get( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.Workspace" + def get(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> _models.Workspace: """Gets the properties of the specified machine learning workspace. Gets the properties of the specified machine learning workspace. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: Workspace, or the result of cls(response) + :return: Workspace or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.Workspace - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) - request = build_get_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.get.metadata['url'], + template_url=self.get.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -934,89 +933,184 @@ def get( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('Workspace', pipeline_response) + 
deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - get.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore - + get.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } def _update_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - body, # type: "_models.WorkspaceUpdateParameters" - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.Workspace"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Workspace"]] + resource_group_name: str, + workspace_name: str, + body: Union[_models.WorkspaceUpdateParameters, IO], + **kwargs: Any + ) -> Optional[_models.Workspace]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'WorkspaceUpdateParameters') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.Workspace]] = kwargs.pop("cls", None) - request = build_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "WorkspaceUpdateParameters") + + request = build_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._update_initial.metadata['url'], + content=_content, + template_url=self._update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None if response.status_code == 200: - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = 
self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - _update_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + _update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + body: _models.WorkspaceUpdateParameters, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Updates a machine learning workspace with the specified parameters. + + Updates a machine learning workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: The parameters for updating a machine learning workspace. Required. + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_update( + self, + resource_group_name: str, + workspace_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Updates a machine learning workspace with the specified parameters. + + Updates a machine learning workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: The parameters for updating a machine learning workspace. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_update( self, - resource_group_name, # type: str - workspace_name, # type: str - body, # type: "_models.WorkspaceUpdateParameters" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.Workspace"] + resource_group_name: str, + workspace_name: str, + body: Union[_models.WorkspaceUpdateParameters, IO], + **kwargs: Any + ) -> LROPoller[_models.Workspace]: """Updates a machine learning workspace with the specified parameters. Updates a machine learning workspace with the specified parameters. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param body: The parameters for updating a machine learning workspace. - :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters + :param body: The parameters for updating a machine learning workspace. Is either a + WorkspaceUpdateParameters type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.WorkspaceUpdateParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -1027,17 +1121,17 @@ def begin_update( Retry-After header is present. 
:return: An instance of LROPoller that returns either Workspace or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._update_initial( resource_group_name=resource_group_name, @@ -1045,112 +1139,133 @@ def begin_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + begin_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } def _create_or_update_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - body, # type: "_models.Workspace" - **kwargs # type: Any - ): - # type: (...) 
-> Optional["_models.Workspace"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.Workspace"]] + self, resource_group_name: str, workspace_name: str, body: Union[_models.Workspace, IO], **kwargs: Any + ) -> Optional[_models.Workspace]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - _json = self._serialize.body(body, 'Workspace') + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.Workspace]] = kwargs.pop("cls", None) - request = build_create_or_update_request_initial( - subscription_id=self._config.subscription_id, + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body + else: + _json = self._serialize.body(body, "Workspace") + + request = build_create_or_update_request( resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._create_or_update_initial.metadata['url'], + content=_content, + template_url=self._create_or_update_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _create_or_update_initial.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore - + _create_or_update_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } - @distributed_trace + @overload def begin_create_or_update( self, - resource_group_name, # type: str - workspace_name, # type: str - body, # type: "_models.Workspace" - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.Workspace"] + resource_group_name: str, + workspace_name: str, + body: _models.Workspace, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: """Creates or updates a workspace with the specified parameters. Creates or updates a workspace with the specified parameters. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param body: The parameters for creating or updating a machine learning workspace. + :param body: The parameters for creating or updating a machine learning workspace. Required. :type body: ~azure.mgmt.machinelearningservices.models.Workspace + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this @@ -1161,17 +1276,86 @@ def begin_create_or_update( Retry-After header is present. :return: An instance of LROPoller that returns either Workspace or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.Workspace"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + + @overload + def begin_create_or_update( + self, + resource_group_name: str, + workspace_name: str, + body: IO, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Creates or updates a workspace with the specified parameters. + + Creates or updates a workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: The parameters for creating or updating a machine learning workspace. Required. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. 
+ Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @distributed_trace + def begin_create_or_update( + self, resource_group_name: str, workspace_name: str, body: Union[_models.Workspace, IO], **kwargs: Any + ) -> LROPoller[_models.Workspace]: + """Creates or updates a workspace with the specified parameters. + + Creates or updates a workspace with the specified parameters. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: The parameters for creating or updating a machine learning workspace. Is either a + Workspace type or a IO type. Required. + :type body: ~azure.mgmt.machinelearningservices.models.Workspace or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. 
+ :return: An instance of LROPoller that returns either Workspace or the result of cls(response) + :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.Workspace] + :raises ~azure.core.exceptions.HttpResponseError: + """ + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.Workspace] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._create_or_update_initial( resource_group_name=resource_group_name, @@ -1179,115 +1363,219 @@ def begin_create_or_update( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('Workspace', pipeline_response) + deserialized = self._deserialize("Workspace", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_create_or_update.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}"} # type: ignore + begin_create_or_update.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}" + } def _diagnose_initial( self, - resource_group_name, # type: str - workspace_name, # type: str - body=None, # type: Optional["_models.DiagnoseWorkspaceParameters"] - **kwargs # type: Any - ): - # type: (...) 
-> Optional["_models.DiagnoseResponseResult"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.DiagnoseResponseResult"]] + resource_group_name: str, + workspace_name: str, + body: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, + **kwargs: Any + ) -> Optional[_models.DiagnoseResponseResult]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[Optional[_models.DiagnoseResponseResult]] = kwargs.pop("cls", None) - if body is not None: - _json = self._serialize.body(body, 'DiagnoseWorkspaceParameters') + content_type = content_type or "application/json" + _json = None + _content = None + if isinstance(body, (IOBase, bytes)): + _content = body else: - _json = None + if body is not None: + _json = self._serialize.body(body, "DiagnoseWorkspaceParameters") + else: + _json = None - request = build_diagnose_request_initial( - subscription_id=self._config.subscription_id, + request = build_diagnose_request( resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, content_type=content_type, json=_json, - template_url=self._diagnose_initial.metadata['url'], + content=_content, + template_url=self._diagnose_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('DiagnoseResponseResult', pipeline_response) + deserialized = self._deserialize("DiagnoseResponseResult", pipeline_response) if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return 
cls(pipeline_response, deserialized, response_headers) return deserialized - _diagnose_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose"} # type: ignore + _diagnose_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose" + } + + @overload + def begin_diagnose( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[_models.DiagnoseWorkspaceParameters] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DiagnoseResponseResult]: + """Diagnose workspace setup issue. + + Diagnose workspace setup issue. + + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: The parameter of diagnosing workspace health. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters + :keyword content_type: Body Parameter content-type. Content type parameter for JSON body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. + :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either DiagnoseResponseResult or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.DiagnoseResponseResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ + + @overload + def begin_diagnose( + self, + resource_group_name: str, + workspace_name: str, + body: Optional[IO] = None, + *, + content_type: str = "application/json", + **kwargs: Any + ) -> LROPoller[_models.DiagnoseResponseResult]: + """Diagnose workspace setup issue. + + Diagnose workspace setup issue. + :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. + :type resource_group_name: str + :param workspace_name: Azure Machine Learning Workspace Name. Required. + :type workspace_name: str + :param body: The parameter of diagnosing workspace health. Default value is None. + :type body: IO + :keyword content_type: Body Parameter content-type. Content type parameter for binary body. + Default value is "application/json". + :paramtype content_type: str + :keyword callable cls: A custom type or function that will be passed the direct response + :keyword str continuation_token: A continuation token to restart a poller from a saved state. + :keyword polling: By default, your polling method will be ARMPolling. Pass in False for this + operation to not poll, or pass in your own initialized polling object for a personal polling + strategy. 
+ :paramtype polling: bool or ~azure.core.polling.PollingMethod + :keyword int polling_interval: Default waiting time between two polls for LRO operations if no + Retry-After header is present. + :return: An instance of LROPoller that returns either DiagnoseResponseResult or the result of + cls(response) + :rtype: + ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.DiagnoseResponseResult] + :raises ~azure.core.exceptions.HttpResponseError: + """ @distributed_trace def begin_diagnose( self, - resource_group_name, # type: str - workspace_name, # type: str - body=None, # type: Optional["_models.DiagnoseWorkspaceParameters"] - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.DiagnoseResponseResult"] + resource_group_name: str, + workspace_name: str, + body: Optional[Union[_models.DiagnoseWorkspaceParameters, IO]] = None, + **kwargs: Any + ) -> LROPoller[_models.DiagnoseResponseResult]: """Diagnose workspace setup issue. Diagnose workspace setup issue. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str - :param body: The parameter of diagnosing workspace health. - :type body: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters + :param body: The parameter of diagnosing workspace health. Is either a + DiagnoseWorkspaceParameters type or a IO type. Default value is None. + :type body: ~azure.mgmt.machinelearningservices.models.DiagnoseWorkspaceParameters or IO + :keyword content_type: Body Parameter content-type. Known values are: 'application/json'. + Default value is None. + :paramtype content_type: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: By default, your polling method will be ARMPolling. 
Pass in False for this @@ -1300,17 +1588,17 @@ def begin_diagnose( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.DiagnoseResponseResult] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - content_type = kwargs.pop('content_type', "application/json") # type: Optional[str] - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.DiagnoseResponseResult"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None)) + cls: ClsType[_models.DiagnoseResponseResult] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._diagnose_initial( resource_group_name=resource_group_name, @@ -1318,41 +1606,44 @@ def begin_diagnose( body=body, api_version=api_version, content_type=content_type, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('DiagnoseResponseResult', pipeline_response) + deserialized = self._deserialize("DiagnoseResponseResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_diagnose.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose"} # type: ignore + begin_diagnose.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose" + } @distributed_trace def list_keys( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> "_models.ListWorkspaceKeysResult" + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ListWorkspaceKeysResult: """Lists all the keys associated with this workspace. This includes keys for the storage account, app insights and password for container registry. @@ -1360,38 +1651,46 @@ def list_keys( app insights and password for container registry. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListWorkspaceKeysResult, or the result of cls(response) + :return: ListWorkspaceKeysResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ListWorkspaceKeysResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListWorkspaceKeysResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ListWorkspaceKeysResult] = kwargs.pop("cls", None) - request = build_list_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_keys.metadata['url'], + template_url=self.list_keys.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1399,61 +1698,66 @@ def list_keys( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ListWorkspaceKeysResult', pipeline_response) + deserialized = self._deserialize("ListWorkspaceKeysResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys"} # type: ignore - + list_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listKeys" + } @distributed_trace def 
list_notebook_access_token( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.NotebookAccessTokenResult" + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.NotebookAccessTokenResult: """Get Azure Machine Learning Workspace notebook access token. Get Azure Machine Learning Workspace notebook access token. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: NotebookAccessTokenResult, or the result of cls(response) + :return: NotebookAccessTokenResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.NotebookAccessTokenResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.NotebookAccessTokenResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.NotebookAccessTokenResult] = kwargs.pop("cls", None) - request = build_list_notebook_access_token_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_notebook_access_token.metadata['url'], + template_url=self.list_notebook_access_token.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1461,61 +1765,66 @@ def list_notebook_access_token( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('NotebookAccessTokenResult', pipeline_response) + deserialized = self._deserialize("NotebookAccessTokenResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_notebook_access_token.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken"} # type: ignore - + list_notebook_access_token.metadata = { + "url": 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookAccessToken" + } @distributed_trace def list_notebook_keys( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ListNotebookKeysResult" + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ListNotebookKeysResult: """Lists keys of Azure Machine Learning Workspaces notebook. Lists keys of Azure Machine Learning Workspaces notebook. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListNotebookKeysResult, or the result of cls(response) + :return: ListNotebookKeysResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ListNotebookKeysResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListNotebookKeysResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ListNotebookKeysResult] = kwargs.pop("cls", None) - request = build_list_notebook_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_notebook_keys.metadata['url'], + template_url=self.list_notebook_keys.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1523,61 +1832,66 @@ def list_notebook_keys( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ListNotebookKeysResult', pipeline_response) + deserialized = self._deserialize("ListNotebookKeysResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_notebook_keys.metadata = {'url': 
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys"} # type: ignore - + list_notebook_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listNotebookKeys" + } @distributed_trace def list_storage_account_keys( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ListStorageAccountKeysResult" + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ListStorageAccountKeysResult: """Lists keys of Azure Machine Learning Workspace's storage account. Lists keys of Azure Machine Learning Workspace's storage account. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ListStorageAccountKeysResult, or the result of cls(response) + :return: ListStorageAccountKeysResult or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ListStorageAccountKeysResult - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ListStorageAccountKeysResult"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ListStorageAccountKeysResult] = kwargs.pop("cls", None) - request = build_list_storage_account_keys_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_storage_account_keys.metadata['url'], + template_url=self.list_storage_account_keys.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1585,24 +1899,21 @@ def list_storage_account_keys( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ListStorageAccountKeysResult', pipeline_response) + deserialized = 
self._deserialize("ListStorageAccountKeysResult", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_storage_account_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys"} # type: ignore - + list_storage_account_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/listStorageAccountKeys" + } @distributed_trace def list_outbound_network_dependencies_endpoints( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> "_models.ExternalFQDNResponse" + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> _models.ExternalFQDNResponse: """Called by Client (Portal, CLI, etc) to get a list of all external outbound dependencies (FQDNs) programmatically. @@ -1610,38 +1921,46 @@ def list_outbound_network_dependencies_endpoints( programmatically. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response - :return: ExternalFQDNResponse, or the result of cls(response) + :return: ExternalFQDNResponse or the result of cls(response) :rtype: ~azure.mgmt.machinelearningservices.models.ExternalFQDNResponse - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - cls = kwargs.pop('cls', None) # type: ClsType["_models.ExternalFQDNResponse"] error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) + + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.ExternalFQDNResponse] = kwargs.pop("cls", None) - request = build_list_outbound_network_dependencies_endpoints_request( - subscription_id=self._config.subscription_id, resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self.list_outbound_network_dependencies_endpoints.metadata['url'], + template_url=self.list_outbound_network_dependencies_endpoints.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200]: @@ -1649,86 +1968,88 @@ def 
list_outbound_network_dependencies_endpoints( error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) - deserialized = self._deserialize('ExternalFQDNResponse', pipeline_response) + deserialized = self._deserialize("ExternalFQDNResponse", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - list_outbound_network_dependencies_endpoints.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints"} # type: ignore - + list_outbound_network_dependencies_endpoints.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/outboundNetworkDependenciesEndpoints" + } def _prepare_notebook_initial( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> Optional["_models.NotebookResourceInfo"] - cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.NotebookResourceInfo"]] + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> Optional[_models.NotebookResourceInfo]: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_prepare_notebook_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[Optional[_models.NotebookResourceInfo]] = kwargs.pop("cls", None) + + request = build_prepare_notebook_request( resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._prepare_notebook_initial.metadata['url'], + template_url=self._prepare_notebook_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) deserialized = None response_headers = {} if response.status_code == 200: - deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) + deserialized = self._deserialize("NotebookResourceInfo", pipeline_response) if 
response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, deserialized, response_headers) return deserialized - _prepare_notebook_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook"} # type: ignore - + _prepare_notebook_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook" + } @distributed_trace def begin_prepare_notebook( - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller["_models.NotebookResourceInfo"] + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> LROPoller[_models.NotebookResourceInfo]: """Prepare Azure Machine Learning Workspace's notebook resource. Prepare Azure Machine Learning Workspace's notebook resource. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. 
@@ -1742,105 +2063,110 @@ def begin_prepare_notebook( cls(response) :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.machinelearningservices.models.NotebookResourceInfo] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType["_models.NotebookResourceInfo"] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[_models.NotebookResourceInfo] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: raw_result = self._prepare_notebook_initial( resource_group_name=resource_group_name, workspace_name=workspace_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) def get_long_running_output(pipeline_response): - response = pipeline_response.http_response - deserialized = self._deserialize('NotebookResourceInfo', pipeline_response) + deserialized = self._deserialize("NotebookResourceInfo", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_prepare_notebook.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook"} # type: ignore + begin_prepare_notebook.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/prepareNotebook" + } def _resync_keys_initial( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) 
-> None - cls = kwargs.pop('cls', None) # type: ClsType[None] + self, resource_group_name: str, workspace_name: str, **kwargs: Any + ) -> None: error_map = { - 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError + 401: ClientAuthenticationError, + 404: ResourceNotFoundError, + 409: ResourceExistsError, + 304: ResourceNotModifiedError, } - error_map.update(kwargs.pop('error_map', {})) + error_map.update(kwargs.pop("error_map", {}) or {}) - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) - - request = build_resync_keys_request_initial( - subscription_id=self._config.subscription_id, + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + + request = build_resync_keys_request( resource_group_name=resource_group_name, workspace_name=workspace_name, + subscription_id=self._config.subscription_id, api_version=api_version, - template_url=self._resync_keys_initial.metadata['url'], + template_url=self._resync_keys_initial.metadata["url"], + headers=_headers, + params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) - pipeline_response = self._client._pipeline.run( # pylint: disable=protected-access - request, - stream=False, - **kwargs + _stream = False + pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access + request, stream=_stream, **kwargs ) + response = pipeline_response.http_response if response.status_code not in [200, 202]: map_error(status_code=response.status_code, response=response, error_map=error_map) - raise HttpResponseError(response=response, error_format=ARMErrorFormat) + error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response) + raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) response_headers = {} if response.status_code == 202: - response_headers['Location']=self._deserialize('str', response.headers.get('Location')) - response_headers['Retry-After']=self._deserialize('int', response.headers.get('Retry-After')) - + response_headers["Location"] = self._deserialize("str", response.headers.get("Location")) + response_headers["Retry-After"] = self._deserialize("int", response.headers.get("Retry-After")) if cls: return cls(pipeline_response, None, response_headers) - _resync_keys_initial.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys"} # type: ignore - + _resync_keys_initial.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys" + } @distributed_trace - def begin_resync_keys( # pylint: disable=inconsistent-return-statements - self, - resource_group_name, # type: str - workspace_name, # type: str - **kwargs # type: Any - ): - # type: (...) -> LROPoller[None] + def begin_resync_keys(self, resource_group_name: str, workspace_name: str, **kwargs: Any) -> LROPoller[None]: """Resync all the keys associated with this workspace.This includes keys for the storage account, app insights and password for container registry. 
@@ -1848,8 +2174,9 @@ def begin_resync_keys( # pylint: disable=inconsistent-return-statements app insights and password for container registry. :param resource_group_name: The name of the resource group. The name is case insensitive. + Required. :type resource_group_name: str - :param workspace_name: Name of Azure Machine Learning workspace. + :param workspace_name: Azure Machine Learning Workspace Name. Required. :type workspace_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. @@ -1861,41 +2188,49 @@ def begin_resync_keys( # pylint: disable=inconsistent-return-statements Retry-After header is present. :return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] - :raises: ~azure.core.exceptions.HttpResponseError + :raises ~azure.core.exceptions.HttpResponseError: """ - api_version = kwargs.pop('api_version', "2023-08-01-preview") # type: str - polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] - cls = kwargs.pop('cls', None) # type: ClsType[None] - lro_delay = kwargs.pop( - 'polling_interval', - self._config.polling_interval - ) - cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] + _headers = kwargs.pop("headers", {}) or {} + _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) + + api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version)) + cls: ClsType[None] = kwargs.pop("cls", None) + polling: Union[bool, PollingMethod] = kwargs.pop("polling", True) + lro_delay = kwargs.pop("polling_interval", self._config.polling_interval) + cont_token: Optional[str] = kwargs.pop("continuation_token", None) if cont_token is None: - raw_result = self._resync_keys_initial( + raw_result = self._resync_keys_initial( # type: ignore resource_group_name=resource_group_name, workspace_name=workspace_name, api_version=api_version, - cls=lambda x,y,z: x, + cls=lambda x, y, z: x, + headers=_headers, + params=_params, **kwargs ) - kwargs.pop('error_map', None) + kwargs.pop("error_map", None) - def get_long_running_output(pipeline_response): + def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements if cls: return cls(pipeline_response, None, {}) - - if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs) - elif polling is False: polling_method = NoPolling() - else: polling_method = polling + if polling is True: + polling_method: PollingMethod = cast( + PollingMethod, ARMPolling(lro_delay, lro_options={"final-state-via": "location"}, **kwargs) + ) + elif polling is False: + polling_method = cast(PollingMethod, NoPolling()) + else: + polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, - deserialization_callback=get_long_running_output + deserialization_callback=get_long_running_output, ) - return LROPoller(self._client, raw_result, get_long_running_output, polling_method) + return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore - begin_resync_keys.metadata = {'url': "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys"} # type: ignore + 
begin_resync_keys.metadata = { + "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/resyncKeys" + } diff --git a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_set_operations.py b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_set_operations.py index c7808bb0a3b7..bbe3719cce9f 100644 --- a/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_set_operations.py +++ b/sdk/ml/azure-ai-ml/azure/ai/ml/operations/_feature_set_operations.py @@ -15,7 +15,7 @@ from azure.ai.ml._artifacts._artifact_utilities import _check_and_upload_path from azure.ai.ml._exception_helper import log_and_raise_error -from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningWorkspaces as ServiceClient082023Preview +from azure.ai.ml._restclient.v2023_08_01_preview import AzureMachineLearningServices as ServiceClient082023Preview from azure.ai.ml._restclient.v2023_10_01 import AzureMachineLearningWorkspaces as ServiceClient102023 from azure.ai.ml._restclient.v2023_10_01.models import ( FeaturesetVersion, diff --git a/sdk/ml/azure-ai-ml/swagger/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/workspaceRP.json b/sdk/ml/azure-ai-ml/swagger/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/workspaceRP.json index e80c1343c870..7b038fbb8080 100644 --- a/sdk/ml/azure-ai-ml/swagger/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/workspaceRP.json +++ b/sdk/ml/azure-ai-ml/swagger/machinelearningservices/resource-manager/Microsoft.MachineLearningServices/preview/2023-08-01-preview/workspaceRP.json @@ -9,19 +9,6 @@ "schemes": [ "https" ], - "consumes": [ - "application/json" - ], - "produces": [ - "application/json" - ], - "security": [ - { - "azure_auth": [ - "user_impersonation" - ] - } - ], "paths": { "/providers/Microsoft.MachineLearningServices/operations": { "get": { @@ -42,7 +29,7 @@ "200": { "description": "The request was successful; the request was well-formed and received properly.", "schema": { - "$ref": "#/definitions/AmlOperationListResult" + "$ref": "../../../../../common-types/resource-management/v3/types.json#/definitions/OperationListResult" } }, "default": { @@ -80,10 +67,10 @@ "$ref": "../../../../../common-types/resource-management/v3/types.json#/parameters/ApiVersionParameter" }, { - "$ref": "#/parameters/PaginationParameter" + "$ref": "#/parameters/WorkspaceKindParameter" }, { - "$ref": "#/parameters/WorkspaceKindParameter" + "$ref": "#/parameters/PaginationParameter" } ], "responses": { @@ -131,10 +118,10 @@ "$ref": "../../../../../common-types/resource-management/v3/types.json#/parameters/ApiVersionParameter" }, { - "$ref": "#/parameters/PaginationParameter" + "$ref": "#/parameters/WorkspaceKindParameter" }, { - "$ref": "#/parameters/WorkspaceKindParameter" + "$ref": "#/parameters/PaginationParameter" } ], "responses": { @@ -193,7 +180,18 @@ "description": "Success" }, "202": { - "description": "Accepted" + "description": "Accepted", + "headers": { + "Location": { + "description": "URI to poll for asynchronous operation result.", + "type": "string" + }, + "Retry-After": { + "description": "Duration the client should wait between requests, in seconds.", + "type": "integer", + "format": "int32" + } + } }, "204": { "description": "No Content" @@ -210,7 +208,10 @@ "$ref": "./examples/Workspace/delete.json" } }, - 
"x-ms-long-running-operation": true + "x-ms-long-running-operation": true, + "x-ms-long-running-operation-options": { + "final-state-via": "location" + } }, "get": { "tags": [ @@ -513,6 +514,9 @@ { "$ref": "#/parameters/ConnectionName" }, + { + "$ref": "#/parameters/AOAIModelsToDeployParameter" + }, { "$ref": "../../../../../common-types/resource-management/v3/types.json#/parameters/ApiVersionParameter" } @@ -541,8 +545,8 @@ "tags": [ "V2WorkspaceConnectionResource" ], - "operationId": "WorkspaceConnections_Update", "summary": "Update machine learning workspaces connections under the specified workspace.", + "operationId": "WorkspaceConnections_Update", "consumes": [ "application/json" ], @@ -657,8 +661,8 @@ "tags": [ "V2WorkspaceConnectionResource" ], - "operationId": "WorkspaceConnections_ListSecrets", "summary": "List all the secrets of a machine learning workspaces connections.", + "operationId": "WorkspaceConnections_ListSecrets", "produces": [ "application/json" ], @@ -677,6 +681,9 @@ }, { "$ref": "../../../../../common-types/resource-management/v3/types.json#/parameters/ApiVersionParameter" + }, + { + "$ref": "#/parameters/AOAIModelsToDeployParameter" } ], "responses": { @@ -700,6 +707,74 @@ } } }, + "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/connections/{connectionName}/testconnection": { + "post": { + "tags": [ + "V2WorkspaceConnectionResource" + ], + "summary": "Test machine learning workspaces connections under the specified workspace.", + "operationId": "WorkspaceConnections_TestConnection", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "parameters": [ + { + "$ref": "../../../../../common-types/resource-management/v3/types.json#/parameters/SubscriptionIdParameter" + }, + { + "$ref": "../../../../../common-types/resource-management/v3/types.json#/parameters/ResourceGroupNameParameter" + }, + { + "$ref": "#/parameters/WorkspaceNameParameter" + }, + { + "$ref": "#/parameters/ConnectionName" + }, + { + "$ref": "../../../../../common-types/resource-management/v3/types.json#/parameters/ApiVersionParameter" + }, + { + "in": "body", + "name": "body", + "description": "Workspace Connection object", + "schema": { + "$ref": "#/definitions/WorkspaceConnectionPropertiesV2BasicResource" + } + } + ], + "responses": { + "202": { + "description": "The request was successful; the request was well-formed and received properly.", + "headers": { + "Location": { + "description": "URI to poll for asynchronous operation result.", + "type": "string" + }, + "Retry-After": { + "description": "Duration the client should wait between requests, in seconds.", + "type": "integer", + "format": "int32" + } + } + }, + "default": { + "description": "Error", + "schema": { + "$ref": "../../../../../common-types/resource-management/v3/types.json#/definitions/ErrorResponse" + } + } + }, + "x-ms-examples": { + "TestWorkspaceConnection": { + "$ref": "./examples/WorkspaceConnection/testConnection.json" + } + }, + "x-ms-long-running-operation": true + } + }, "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.MachineLearningServices/workspaces/{workspaceName}/diagnose": { "post": { "tags": [ @@ -1054,8 +1129,8 @@ "tags": [ "ManagedNetwork" ], - "operationId": "ManagedNetworkSettingsRule_Delete", "summary": "Deletes an outbound rule from the managed network of a machine learning workspace.", + "operationId": "ManagedNetworkSettingsRule_Delete", 
"produces": [ "application/json" ], @@ -1077,21 +1152,28 @@ } ], "responses": { + "200": { + "description": "Success" + }, "202": { "description": "Accepted", "headers": { "Location": { "description": "URI to poll for asynchronous operation result.", "type": "string" + }, + "Retry-After": { + "description": "Duration the client should wait between requests, in seconds.", + "type": "integer", + "format": "int32", + "maximum": 600, + "minimum": 10 } } }, "204": { "description": "No Content" }, - "200": { - "description": "The request was successful; the request was well-formed and received properly." - }, "default": { "description": "Error", "schema": { @@ -1104,14 +1186,17 @@ "$ref": "./examples/ManagedNetwork/deleteRule.json" } }, - "x-ms-long-running-operation": true + "x-ms-long-running-operation": true, + "x-ms-long-running-operation-options": { + "final-state-via": "location" + } }, "get": { "tags": [ "ManagedNetwork" ], - "operationId": "ManagedNetworkSettingsRule_Get", "summary": "Gets an outbound rule from the managed network of a machine learning workspace.", + "operationId": "ManagedNetworkSettingsRule_Get", "produces": [ "application/json" ], @@ -1156,8 +1241,8 @@ "tags": [ "ManagedNetwork" ], - "operationId": "ManagedNetworkSettingsRule_CreateOrUpdate", "summary": "Creates or updates an outbound rule in the managed network of a machine learning workspace.", + "operationId": "ManagedNetworkSettingsRule_CreateOrUpdate", "consumes": [ "application/json" ], @@ -1183,8 +1268,8 @@ { "in": "body", "name": "body", - "required": true, "description": "Outbound Rule to be created or updated in the managed network of a machine learning workspace.", + "required": true, "schema": { "$ref": "#/definitions/OutboundRuleBasicResource" } @@ -1549,8 +1634,8 @@ "tags": [ "ManagedNetwork" ], - "operationId": "ManagedNetworkProvisions_ProvisionManagedNetwork", "summary": "Provisions the managed network of a machine learning workspace.", + "operationId": "ManagedNetworkProvisions_ProvisionManagedNetwork", "consumes": [ "application/json" ], @@ -1698,44 +1783,6 @@ }, "x-ms-discriminator-value": "AccessKey" }, - "AmlOperation": { - "description": "Azure Machine Learning team account REST API operation", - "type": "object", - "properties": { - "display": { - "description": "Gets or sets display name of operation", - "$ref": "#/definitions/OperationDisplay" - }, - "isDataAction": { - "description": "Indicates whether the operation applies to data-plane", - "type": "boolean" - }, - "name": { - "description": "Gets or sets operation name: {provider}/{resource}/{operation}", - "type": "string" - }, - "origin": { - "description": "The intended executor of the operation: user/system", - "type": "string" - } - } - }, - "AmlOperationListResult": { - "type": "object", - "description": "An array of operations supported by the resource provider.", - "properties": { - "value": { - "description": "Gets or sets list of AML team account operations supported by the\r\nAML team account resource provider.", - "type": "array", - "items": { - "$ref": "#/definitions/AmlOperation" - }, - "x-ms-identifiers": [ - "name" - ] - } - } - }, "ApiKeyAuthWorkspaceConnectionProperties": { "description": "This connection type covers the generic ApiKey auth connection categories, for examples:\r\nAzureOpenAI:\r\n Category:= AzureOpenAI\r\n AuthType:= ApiKey (as type discriminator)\r\n Credentials:= {ApiKey} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey\r\n Target:= {ApiBase}\r\n \r\nCognitiveService:\r\n 
Category:= CognitiveService\r\n AuthType:= ApiKey (as type discriminator)\r\n Credentials:= {SubscriptionKey} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey\r\n Target:= ServiceRegion={serviceRegion}\r\n \r\nCognitiveSearch:\r\n Category:= CognitiveSearch\r\n AuthType:= ApiKey (as type discriminator)\r\n Credentials:= {Key} as Microsoft.MachineLearning.AccountRP.Contracts.WorkspaceConnection.ApiKey\r\n Target:= {Endpoint}\r\n \r\nUse Metadata property bag for ApiType, ApiVersion, Kind and other metadata fields", "type": "object", @@ -1814,8 +1861,8 @@ } }, "CustomKeys": { - "type": "object", "description": "Custom Keys credential object", + "type": "object", "properties": { "keys": { "type": "object", @@ -1835,6 +1882,7 @@ ], "properties": { "credentials": { + "description": "Custom Keys credential object", "$ref": "#/definitions/CustomKeys" } }, @@ -1873,6 +1921,11 @@ "type": "object", "additionalProperties": {} }, + "requiredResourceProviders": { + "description": "Setting for diagnosing the presence of required resource providers in the workspace.", + "type": "object", + "additionalProperties": {} + }, "resourceLock": { "description": "Setting for diagnosing resource lock", "type": "object", @@ -1982,8 +2035,8 @@ } }, "DiagnoseResult": { - "type": "object", "description": "Result of Diagnose", + "type": "object", "properties": { "code": { "description": "Code for workspace setup error", @@ -2012,13 +2065,13 @@ } }, "DiagnoseResultLevel": { + "description": "Level of workspace setup error", "enum": [ "Warning", "Error", "Information" ], "type": "string", - "description": "Level of workspace setup error", "x-ms-enum": { "name": "DiagnoseResultLevel", "modelAsString": true @@ -2159,8 +2212,8 @@ } }, "FQDNEndpointsPropertyBag": { - "type": "object", "description": "Property bag for FQDN endpoints result", + "type": "object", "properties": { "properties": { "$ref": "#/definitions/FQDNEndpoints" @@ -2182,8 +2235,8 @@ } }, "FqdnOutboundRule": { - "type": "object", "description": "FQDN Outbound Rule for the managed network of a machine learning workspace.", + "type": "object", "allOf": [ { "$ref": "#/definitions/OutboundRule" @@ -2197,8 +2250,8 @@ "x-ms-discriminator-value": "FQDN" }, "IdentityForCmk": { - "type": "object", "description": "Identity object used for encryption.", + "type": "object", "properties": { "userAssignedIdentity": { "description": "UserAssignedIdentity to be used to fetch the encryption key from keyVault", @@ -2220,12 +2273,12 @@ } }, "KeyVaultProperties": { + "description": "Customer Key vault properties.", "required": [ "keyIdentifier", "keyVaultArmId" ], "type": "object", - "description": "Customer Key vault properties.", "properties": { "identityClientId": { "description": "Currently, we support only SystemAssigned MSI.\r\nWe need this when we support UserAssignedIdentities", @@ -2350,6 +2403,7 @@ "type": "boolean" }, "status": { + "description": "Status for the managed network of a machine learning workspace.", "$ref": "#/definitions/ManagedNetworkStatus" } } @@ -2359,6 +2413,7 @@ "type": "object", "properties": { "isolationMode": { + "description": "Isolation mode for the managed network of a machine learning workspace.", "$ref": "#/definitions/IsolationMode" }, "networkId": { @@ -2368,12 +2423,21 @@ "outboundRules": { "type": "object", "additionalProperties": { + "description": "Outbound Rule for the managed network of a machine learning workspace.", "$ref": "#/definitions/OutboundRule" - }, - "x-nullable": true + } }, "status": { + 
"description": "Status of the Provisioning for the managed network of a machine learning workspace.", "$ref": "#/definitions/ManagedNetworkProvisionStatus" + }, + "changeableIsolationModes": { + "description": "Detail isolation modes for the managed network of a machine learning workspace.", + "type": "array", + "items": { + "$ref": "#/definitions/IsolationMode" + }, + "readOnly": true } } }, @@ -2493,49 +2557,29 @@ } } }, - "OperationDisplay": { - "description": "Display name of operation", - "type": "object", - "properties": { - "description": { - "description": "Gets or sets the description for the operation.", - "type": "string" - }, - "operation": { - "description": "Gets or sets the operation that users can perform.", - "type": "string" - }, - "provider": { - "description": "Gets or sets the resource provider name:\r\nMicrosoft.MachineLearningExperimentation", - "type": "string" - }, - "resource": { - "description": "Gets or sets the resource on which the operation is performed.", - "type": "string" - } - } - }, "OutboundRule": { - "description": "Outbound Rule for the managed network of a machine learning workspace.", + "description": "Outbound rule for the managed network of a machine learning workspace.", "required": [ "type" ], "type": "object", "properties": { "category": { + "description": "Category of a managed network outbound rule of a machine learning workspace.", "$ref": "#/definitions/RuleCategory" }, "status": { + "description": "Type of a managed network outbound rule of a machine learning workspace.", "$ref": "#/definitions/RuleStatus" }, "type": { + "description": "Type of a managed network outbound rule of a machine learning workspace.", "$ref": "#/definitions/RuleType" } }, "discriminator": "type" }, "OutboundRuleBasicResource": { - "description": "Outbound Rule Basic Resource for the managed network of a machine learning workspace.", "required": [ "properties" ], @@ -2547,6 +2591,7 @@ ], "properties": { "properties": { + "description": "Outbound Rule for the managed network of a machine learning workspace.", "$ref": "#/definitions/OutboundRule" } } @@ -2604,8 +2649,8 @@ } }, "PrivateEndpointConnection": { - "type": "object", "description": "The Private Endpoint Connection resource.", + "type": "object", "allOf": [ { "$ref": "../../../../../common-types/resource-management/v3/types.json#/definitions/Resource" @@ -2637,12 +2682,12 @@ } }, "PrivateEndpointConnectionListResult": { - "type": "object", "description": "List of private endpoint connection associated with the specified workspace", + "type": "object", "properties": { "value": { - "type": "array", "description": "Array of private endpoint connections", + "type": "array", "items": { "$ref": "#/definitions/PrivateEndpointConnection" } @@ -2666,8 +2711,6 @@ } }, "PrivateEndpointConnectionProvisioningState": { - "type": "string", - "readOnly": true, "description": "The current provisioning state.", "enum": [ "Succeeded", @@ -2675,22 +2718,25 @@ "Deleting", "Failed" ], + "type": "string", "x-ms-enum": { "name": "PrivateEndpointConnectionProvisioningState", "modelAsString": true } }, "PrivateEndpointDestination": { - "type": "object", "description": "Private Endpoint destination for a Private Endpoint Outbound Rule for the managed network of a machine learning workspace.", + "type": "object", "properties": { "serviceResourceId": { - "type": "string" + "type": "string", + "format": "arm-id" }, "sparkEnabled": { "type": "boolean" }, "sparkStatus": { + "description": "Type of a managed network Outbound Rule of a 
machine learning workspace.", "$ref": "#/definitions/RuleStatus" }, "subresourceTarget": { @@ -2708,6 +2754,7 @@ ], "properties": { "destination": { + "description": "Private Endpoint destination for a Private Endpoint Outbound Rule for the managed network of a machine learning workspace.", "$ref": "#/definitions/PrivateEndpointDestination" } }, @@ -2729,8 +2776,8 @@ } }, "PrivateLinkResource": { - "type": "object", "description": "A private link resource", + "type": "object", "allOf": [ { "$ref": "../../../../../common-types/resource-management/v3/types.json#/definitions/Resource" @@ -2745,6 +2792,7 @@ "type": "string" }, "properties": { + "description": "Properties of a private link resource.", "$ref": "#/definitions/PrivateLinkResourceProperties", "x-ms-client-flatten": true }, @@ -2773,8 +2821,8 @@ } }, "PrivateLinkResourceProperties": { - "type": "object", "description": "Properties of a private link resource.", + "type": "object", "properties": { "groupId": { "description": "The private link resource group id.", @@ -2797,8 +2845,8 @@ } }, "PrivateLinkServiceConnectionState": { - "type": "object", "description": "A collection of information about the state of the connection between service consumer and provider.", + "type": "object", "properties": { "actionsRequired": { "description": "Some RP chose \"None\". Other RPs use this for region expansion.", @@ -2936,6 +2984,20 @@ }, "x-ms-discriminator-value": "SAS" }, + "ServerlessComputeSettings": { + "type": "object", + "properties": { + "serverlessComputeCustomSubnet": { + "description": "The resource ID of an existing virtual network subnet in which serverless compute nodes should be deployed", + "type": "string", + "format": "arm-id" + }, + "serverlessComputeNoPublicIP": { + "description": "The flag to signal if serverless compute nodes deployed in custom vNet would have no public IP addresses for a workspace with private endpoint", + "type": "boolean" + } + } + }, "ServiceManagedResourcesSettings": { "type": "object", "properties": { @@ -2959,10 +3021,11 @@ "x-ms-discriminator-value": "ServicePrincipal" }, "ServiceTagDestination": { - "type": "object", "description": "Service Tag destination for a Service Tag Outbound Rule for the managed network of a machine learning workspace.", + "type": "object", "properties": { "action": { + "description": "The action enum for networking rule.", "$ref": "#/definitions/RuleAction" }, "addressPrefixes": { @@ -2985,8 +3048,8 @@ } }, "ServiceTagOutboundRule": { - "type": "object", "description": "Service Tag Outbound Rule for the managed network of a machine learning workspace.", + "type": "object", "allOf": [ { "$ref": "#/definitions/OutboundRule" @@ -2994,6 +3057,7 @@ ], "properties": { "destination": { + "description": "Service Tag destination for a Service Tag Outbound Rule for the managed network of a machine learning workspace.", "$ref": "#/definitions/ServiceTagDestination" } }, @@ -3007,14 +3071,15 @@ "type": "string" }, "properties": { + "description": "Properties of a shared private link resource.", "$ref": "#/definitions/SharedPrivateLinkResourceProperty", "x-ms-client-flatten": true } } }, "SharedPrivateLinkResourceProperty": { - "type": "object", "description": "Properties of a shared private link resource.", + "type": "object", "properties": { "groupId": { "description": "group id of the private link", @@ -3048,11 +3113,11 @@ "x-ms-discriminator-value": "UsernamePassword" }, "Workspace": { + "description": "An object that represents a machine learning workspace.", "required": [ 
"properties" ], "type": "object", - "description": "An object that represents a machine learning workspace.", "allOf": [ { "$ref": "../../../../../common-types/resource-management/v3/types.json#/definitions/Resource" @@ -3097,8 +3162,8 @@ } }, "WorkspaceConnectionApiKey": { - "type": "object", "description": "Api key object for workspace connection credential.", + "type": "object", "properties": { "key": { "type": "string" @@ -3138,10 +3203,20 @@ "description": "Category of the connection", "$ref": "#/definitions/ConnectionCategory" }, + "createdByWorkspaceArmId": { + "description": "The arm id of the workspace which created this connection", + "type": "string", + "format": "arm-id", + "readOnly": true + }, "expiryTime": { "format": "date-time", "type": "string" }, + "isSharedToAll": { + "description": "whether this connection will be shared to all the project workspace under the hub", + "type": "boolean" + }, "metadata": { "type": "object" }, @@ -3204,8 +3279,8 @@ } }, "WorkspaceConnectionUpdateParameter": { - "type": "object", "description": "The properties that the machine learning workspace connection will be updated with.", + "type": "object", "properties": { "properties": { "description": "The properties that the machine learning workspace connection will be updated with.", @@ -3225,8 +3300,8 @@ } }, "WorkspaceHubConfig": { - "type": "object", "description": "WorkspaceHub's configuration object.", + "type": "object", "properties": { "additionalWorkspaceStorageAccounts": { "type": "array", @@ -3257,8 +3332,8 @@ } }, "WorkspacePrivateEndpointResource": { - "type": "object", "description": "The Private Endpoint resource.", + "type": "object", "properties": { "id": { "description": "e.g. /subscriptions/{networkSubscriptionId}/resourceGroups/{rgName}/providers/Microsoft.Network/privateEndpoints/{privateEndpointName}", @@ -3413,6 +3488,10 @@ "description": "Whether requests from Public Network are allowed.", "$ref": "#/definitions/PublicNetworkAccessType" }, + "serverlessComputeSettings": { + "description": "Settings for serverless compute created in the workspace", + "$ref": "#/definitions/ServerlessComputeSettings" + }, "serviceManagedResourcesSettings": { "description": "The service managed resource settings.", "$ref": "#/definitions/ServiceManagedResourcesSettings" @@ -3475,6 +3554,7 @@ "type": "boolean" }, "workspaceHubConfig": { + "description": "WorkspaceHub's configuration object.", "$ref": "#/definitions/WorkspaceHubConfig" }, "workspaceId": { @@ -3532,6 +3612,10 @@ "description": "Whether requests from Public Network are allowed.", "$ref": "#/definitions/PublicNetworkAccessType" }, + "serverlessComputeSettings": { + "description": "Settings for serverless compute created in the workspace", + "$ref": "#/definitions/ServerlessComputeSettings" + }, "serviceManagedResourcesSettings": { "description": "The service managed resource settings.", "$ref": "#/definitions/ServiceManagedResourcesSettings" @@ -3581,23 +3665,22 @@ "type": "string", "x-ms-parameter-location": "method" }, + "PaginationParameter": { + "in": "query", + "name": "$skip", + "description": "Continuation token for pagination.", + "type": "string", + "x-ms-parameter-location": "method" + }, "WorkspaceNameParameter": { "in": "path", "name": "workspaceName", - "description": "Name of Azure Machine Learning workspace.", + "description": "Azure Machine Learning Workspace Name", "required": true, "type": "string", "x-ms-parameter-location": "method", "pattern": "^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" }, - "PaginationParameter": { 
- "in": "query", - "name": "$skip", - "type": "string", - "description": "Continuation token for pagination.", - "required": false, - "x-ms-parameter-location": "method" - }, "WorkspaceForceToPurgeParameter": { "in": "query", "name": "forceToPurge", @@ -3629,6 +3712,13 @@ "x-ms-parameter-location": "method", "pattern": "^[a-zA-Z0-9][a-zA-Z0-9_-]{2,32}$" }, + "AOAIModelsToDeployParameter": { + "in": "query", + "name": "aoaiModelsToDeploy", + "description": "query parameter for which AOAI mode should be deployed", + "type": "string", + "x-ms-parameter-location": "method" + }, "RuleName": { "in": "path", "name": "ruleName", @@ -3652,10 +3742,16 @@ "type": "oauth2", "flow": "implicit", "authorizationUrl": "https://login.microsoftonline.com/common/oauth2/authorize", - "description": "Azure Active Directory OAuth2 Flow.", "scopes": { "user_impersonation": "impersonate your user account" } } - } + }, + "security": [ + { + "azure_auth": [ + "user_impersonation" + ] + } + ] }